summaryrefslogtreecommitdiff
path: root/include/gst/video
diff options
context:
space:
mode:
authorGeorge Hazan <ghazan@miranda.im>2022-08-03 21:02:36 +0300
committerGeorge Hazan <ghazan@miranda.im>2022-08-03 21:02:36 +0300
commit5323a782c4e8c42781f22ce2f488962a18f82554 (patch)
treef71537197b16f0f8fd0d6937f7120d018d220814 /include/gst/video
parent50acf9d37183f86f6f623aad410003392b0af41f (diff)
Jabber: initial version of Jingle support
Diffstat (limited to 'include/gst/video')
-rw-r--r--include/gst/video/colorbalance.h124
-rw-r--r--include/gst/video/colorbalancechannel.h89
-rw-r--r--include/gst/video/gstvideoaffinetransformationmeta.h81
-rw-r--r--include/gst/video/gstvideoaggregator.h332
-rw-r--r--include/gst/video/gstvideocodecalphameta.h88
-rw-r--r--include/gst/video/gstvideodecoder.h558
-rw-r--r--include/gst/video/gstvideoencoder.h395
-rw-r--r--include/gst/video/gstvideofilter.h89
-rw-r--r--include/gst/video/gstvideometa.h418
-rw-r--r--include/gst/video/gstvideopool.h88
-rw-r--r--include/gst/video/gstvideosink.h153
-rw-r--r--include/gst/video/gstvideotimecode.h257
-rw-r--r--include/gst/video/gstvideoutils.h341
-rw-r--r--include/gst/video/navigation.h338
-rw-r--r--include/gst/video/video-anc.h504
-rw-r--r--include/gst/video/video-blend.h41
-rw-r--r--include/gst/video/video-chroma.h118
-rw-r--r--include/gst/video/video-color.h304
-rw-r--r--include/gst/video/video-converter.h318
-rw-r--r--include/gst/video/video-dither.h81
-rw-r--r--include/gst/video/video-enumtypes.h248
-rw-r--r--include/gst/video/video-event.h69
-rw-r--r--include/gst/video/video-format.h809
-rw-r--r--include/gst/video/video-frame.h254
-rw-r--r--include/gst/video/video-hdr.h143
-rw-r--r--include/gst/video/video-info.h484
-rw-r--r--include/gst/video/video-multiview.h108
-rw-r--r--include/gst/video/video-overlay-composition.h310
-rw-r--r--include/gst/video/video-prelude.h41
-rw-r--r--include/gst/video/video-resampler.h178
-rw-r--r--include/gst/video/video-scaler.h101
-rw-r--r--include/gst/video/video-tile.h140
-rw-r--r--include/gst/video/video.h200
-rw-r--r--include/gst/video/videodirection.h64
-rw-r--r--include/gst/video/videoorientation.h111
-rw-r--r--include/gst/video/videooverlay.h119
36 files changed, 8096 insertions, 0 deletions
diff --git a/include/gst/video/colorbalance.h b/include/gst/video/colorbalance.h
new file mode 100644
index 0000000000..282ded9fcd
--- /dev/null
+++ b/include/gst/video/colorbalance.h
@@ -0,0 +1,124 @@
+/* GStreamer Color Balance
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * color-balance.h: image color balance interface design
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_COLOR_BALANCE_H__
+#define __GST_COLOR_BALANCE_H__
+
+#include <gst/gst.h>
+#include <gst/video/colorbalancechannel.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_COLOR_BALANCE \
+ (gst_color_balance_get_type ())
+#define GST_COLOR_BALANCE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_COLOR_BALANCE, GstColorBalance))
+#define GST_IS_COLOR_BALANCE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_COLOR_BALANCE))
+#define GST_COLOR_BALANCE_GET_INTERFACE(inst) \
+ (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_COLOR_BALANCE, GstColorBalanceInterface))
+
+typedef struct _GstColorBalance GstColorBalance;
+typedef struct _GstColorBalanceInterface GstColorBalanceInterface;
+
+/**
+ * GstColorBalanceType:
+ * @GST_COLOR_BALANCE_HARDWARE: Color balance is implemented with dedicated
+ * hardware.
+ * @GST_COLOR_BALANCE_SOFTWARE: Color balance is implemented via software
+ * processing.
+ *
+ * An enumeration indicating whether an element implements color balancing
+ * operations in software or in dedicated hardware. In general, dedicated
+ * hardware implementations (such as those provided by xvimagesink) are
+ * preferred.
+ */
+typedef enum
+{
+ GST_COLOR_BALANCE_HARDWARE,
+ GST_COLOR_BALANCE_SOFTWARE
+} GstColorBalanceType;
+
+/**
+ * GstColorBalanceInterface:
+ * @iface: the parent interface
+ * @get_balance_type: implementation type
+ * @list_channels: list handled channels
+ * @set_value: set a channel value
+ * @get_value: get a channel value
+ * @value_changed: default handler for value changed notification
+ *
+ * Color-balance interface.
+ */
+struct _GstColorBalanceInterface {
+ GTypeInterface iface;
+
+ /* virtual functions */
+ const GList * (* list_channels) (GstColorBalance *balance);
+
+ void (* set_value) (GstColorBalance *balance,
+ GstColorBalanceChannel *channel,
+ gint value);
+ gint (* get_value) (GstColorBalance *balance,
+ GstColorBalanceChannel *channel);
+ GstColorBalanceType (*get_balance_type) (GstColorBalance *balance);
+
+ /* signals */
+ void (* value_changed) (GstColorBalance *balance,
+ GstColorBalanceChannel *channel,
+ gint value);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+GType gst_color_balance_get_type (void);
+
+/* virtual class function wrappers */
+
+GST_VIDEO_API
+const GList *
+ gst_color_balance_list_channels (GstColorBalance *balance);
+
+GST_VIDEO_API
+void gst_color_balance_set_value (GstColorBalance *balance,
+ GstColorBalanceChannel *channel,
+ gint value);
+
+GST_VIDEO_API
+gint gst_color_balance_get_value (GstColorBalance *balance,
+ GstColorBalanceChannel *channel);
+
+GST_VIDEO_API
+GstColorBalanceType
+ gst_color_balance_get_balance_type (GstColorBalance *balance);
+
+/* trigger signal */
+
+GST_VIDEO_API
+void gst_color_balance_value_changed (GstColorBalance *balance,
+ GstColorBalanceChannel *channel,
+ gint value);
+
+G_END_DECLS
+
+#endif /* __GST_COLOR_BALANCE_H__ */
diff --git a/include/gst/video/colorbalancechannel.h b/include/gst/video/colorbalancechannel.h
new file mode 100644
index 0000000000..35aca0f0e8
--- /dev/null
+++ b/include/gst/video/colorbalancechannel.h
@@ -0,0 +1,89 @@
+/* GStreamer Color Balance
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * colorbalancechannel.h: individual channel object
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_COLOR_BALANCE_CHANNEL_H__
+#define __GST_COLOR_BALANCE_CHANNEL_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_COLOR_BALANCE_CHANNEL \
+ (gst_color_balance_channel_get_type ())
+#define GST_COLOR_BALANCE_CHANNEL(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_COLOR_BALANCE_CHANNEL, \
+ GstColorBalanceChannel))
+#define GST_COLOR_BALANCE_CHANNEL_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_COLOR_BALANCE_CHANNEL, \
+ GstColorBalanceChannelClass))
+#define GST_IS_COLOR_BALANCE_CHANNEL(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_COLOR_BALANCE_CHANNEL))
+#define GST_IS_COLOR_BALANCE_CHANNEL_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_COLOR_BALANCE_CHANNEL))
+
+typedef struct _GstColorBalanceChannel GstColorBalanceChannel;
+typedef struct _GstColorBalanceChannelClass GstColorBalanceChannelClass;
+
+/**
+ * GstColorBalanceChannel:
+ * @label: A string containing a descriptive name for this channel
+ * @min_value: The minimum valid value for this channel.
+ * @max_value: The maximum valid value for this channel.
+ */
+struct _GstColorBalanceChannel {
+ GObject parent;
+
+ /*< public >*/
+ gchar *label;
+ gint min_value;
+ gint max_value;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**
+ * GstColorBalanceChannelClass:
+ * @parent: the parent class
+ * @value_changed: default handler for value changed notification
+ *
+ * Color-balance channel class.
+ */
+struct _GstColorBalanceChannelClass {
+ GObjectClass parent;
+
+ /* signals */
+ void (* value_changed) (GstColorBalanceChannel *channel,
+ gint value);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+GType gst_color_balance_channel_get_type (void);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstColorBalanceChannel, gst_object_unref)
+
+G_END_DECLS
+
+#endif /* __GST_COLOR_BALANCE_CHANNEL_H__ */
diff --git a/include/gst/video/gstvideoaffinetransformationmeta.h b/include/gst/video/gstvideoaffinetransformationmeta.h
new file mode 100644
index 0000000000..8dee6d9027
--- /dev/null
+++ b/include/gst/video/gstvideoaffinetransformationmeta.h
@@ -0,0 +1,81 @@
+/* GStreamer
+ * Copyright (C) Collabora Ltd.
+ * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
+ * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_AFFINE_TRANSFORMATION_META_H__
+#define __GST_VIDEO_AFFINE_TRANSFORMATION_META_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+#define GST_VIDEO_AFFINE_TRANSFORMATION_META_API_TYPE (gst_video_affine_transformation_meta_api_get_type())
+#define GST_VIDEO_AFFINE_TRANSFORMATION_META_INFO (gst_video_affine_transformation_meta_get_info())
+
+typedef struct _GstVideoAffineTransformationMeta GstVideoAffineTransformationMeta;
+typedef gboolean (*GstVideoAffineTransformationGetMatrix) (GstVideoAffineTransformationMeta * meta, gfloat * matrix);
+
+#define GST_CAPS_FEATURE_META_GST_VIDEO_AFFINE_TRANSFORMATION_META "meta:GstVideoAffineTransformation"
+#define GST_BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META "GstBufferPoolOptionVideoAffineTransformation"
+
+/**
+ * GstVideoAffineTransformationMeta:
+ * @meta: parent #GstMeta
+ * @matrix: the column-major 4x4 transformation matrix
+ *
+ * Extra buffer metadata for performing an affine transformation using a 4x4
+ * matrix. The transformation matrix can be composed with
+ * gst_video_affine_transformation_meta_apply_matrix().
+ *
+ * The vertices operated on are all in the range 0 to 1, not in
+ * Normalized Device Coordinates (-1 to +1). Transforming points in this space
+ * are assumed to have an origin at (0.5, 0.5, 0.5) in a left-handed coordinate
+ * system with the x-axis moving horizontally (positive values to the right),
+ * the y-axis moving vertically (positive values up the screen) and the z-axis
+ * perpendicular to the screen (positive values into the screen).
+ *
+ * Since: 1.8
+ */
+struct _GstVideoAffineTransformationMeta
+{
+ GstMeta meta;
+
+ gfloat matrix[16];
+};
+
+GST_VIDEO_API
+GType gst_video_affine_transformation_meta_api_get_type (void);
+
+GST_VIDEO_API
+const GstMetaInfo *gst_video_affine_transformation_meta_get_info (void);
+
+#define gst_buffer_get_video_affine_transformation_meta(b) \
+ ((GstVideoAffineTransformationMeta *)gst_buffer_get_meta((b),GST_VIDEO_AFFINE_TRANSFORMATION_META_API_TYPE))
+GST_VIDEO_API
+GstVideoAffineTransformationMeta *gst_buffer_add_video_affine_transformation_meta (GstBuffer * buffer);
+
+GST_VIDEO_API
+void gst_video_affine_transformation_meta_apply_matrix (GstVideoAffineTransformationMeta * meta,
+ const gfloat matrix[16]);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_AFFINE_TRANSFORMATION_META_H__ */
diff --git a/include/gst/video/gstvideoaggregator.h b/include/gst/video/gstvideoaggregator.h
new file mode 100644
index 0000000000..368c723e8e
--- /dev/null
+++ b/include/gst/video/gstvideoaggregator.h
@@ -0,0 +1,332 @@
+/* Generic video aggregator plugin
+ * Copyright (C) 2008 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_AGGREGATOR_H__
+#define __GST_VIDEO_AGGREGATOR_H__
+
+#include <gst/video/video.h>
+#include <gst/base/gstaggregator.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoAggregator GstVideoAggregator;
+typedef struct _GstVideoAggregatorClass GstVideoAggregatorClass;
+typedef struct _GstVideoAggregatorPrivate GstVideoAggregatorPrivate;
+
+/*************************
+ * GstVideoAggregatorPad *
+ *************************/
+
+#define GST_TYPE_VIDEO_AGGREGATOR_PAD (gst_video_aggregator_pad_get_type())
+#define GST_VIDEO_AGGREGATOR_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR_PAD, GstVideoAggregatorPad))
+#define GST_VIDEO_AGGREGATOR_PAD_CAST(obj) ((GstVideoAggregatorPad *)(obj))
+#define GST_VIDEO_AGGREGATOR_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_AGGREGATOR_PAD, GstVideoAggregatorPadClass))
+#define GST_IS_VIDEO_AGGREGATOR_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_AGGREGATOR_PAD))
+#define GST_IS_VIDEO_AGGREGATOR_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_AGGREGATOR_PAD))
+#define GST_VIDEO_AGGREGATOR_PAD_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_AGGREGATOR_PAD,GstVideoAggregatorPadClass))
+
+typedef struct _GstVideoAggregatorPad GstVideoAggregatorPad;
+typedef struct _GstVideoAggregatorPadClass GstVideoAggregatorPadClass;
+typedef struct _GstVideoAggregatorPadPrivate GstVideoAggregatorPadPrivate;
+
+/**
+ * GstVideoAggregatorPad:
+ * @info: The #GstVideoInfo currently set on the pad
+ *
+ * Since: 1.16
+ */
+struct _GstVideoAggregatorPad
+{
+ GstAggregatorPad parent;
+
+ /*< public >*/
+ /* read-only, with OBJECT_LOCK */
+ GstVideoInfo info;
+
+ /* < private > */
+ GstVideoAggregatorPadPrivate *priv;
+
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**
+ * GstVideoAggregatorPadClass:
+ * @update_conversion_info: Called when either the input or output formats
+ * have changed.
+ * @prepare_frame: Prepare the frame from the pad buffer and sets it to prepared_frame.
+ * Implementations should always return TRUE. Returning FALSE will cease
+ * iteration over subsequent pads.
+ * @clean_frame: clean the frame previously prepared in prepare_frame
+ *
+ * Since: 1.16
+ */
+/**
+ * GstVideoAggregatorPadClass::prepare_frame_start:
+ * @pad: the #GstVideoAggregatorPad
+ * @videoaggregator: the parent #GstVideoAggregator
+ * @buffer: the input #GstBuffer to prepare
+ * @prepared_frame: the #GstVideoFrame to prepare into
+ *
+ * Begin preparing the frame from the pad buffer and sets it to prepared_frame.
+ *
+ * If overridden, `prepare_frame_finish` must also be overridden.
+ *
+ * Since: 1.20
+ */
+/**
+ * GstVideoAggregatorPadClass::prepare_frame_finish:
+ * @pad: the #GstVideoAggregatorPad
+ * @videoaggregator: the parent #GstVideoAggregator
+ * @prepared_frame: the #GstVideoFrame to prepare into
+ *
+ * Finish preparing @prepared_frame.
+ *
+ * If overridden, `prepare_frame_start` must also be overridden.
+ *
+ * Since: 1.20
+ */
+struct _GstVideoAggregatorPadClass
+{
+ GstAggregatorPadClass parent_class;
+ void (*update_conversion_info) (GstVideoAggregatorPad * pad);
+
+ gboolean (*prepare_frame) (GstVideoAggregatorPad * pad,
+ GstVideoAggregator * videoaggregator,
+ GstBuffer * buffer,
+ GstVideoFrame * prepared_frame);
+
+ void (*clean_frame) (GstVideoAggregatorPad * pad,
+ GstVideoAggregator * videoaggregator,
+ GstVideoFrame * prepared_frame);
+
+ void (*prepare_frame_start) (GstVideoAggregatorPad * pad,
+ GstVideoAggregator * videoaggregator,
+ GstBuffer * buffer,
+ GstVideoFrame * prepared_frame);
+
+ void (*prepare_frame_finish) (GstVideoAggregatorPad * pad,
+ GstVideoAggregator * videoaggregator,
+ GstVideoFrame * prepared_frame);
+
+ gpointer _gst_reserved[GST_PADDING_LARGE-2];
+};
+
+GST_VIDEO_API
+GType gst_video_aggregator_pad_get_type (void);
+
+GST_VIDEO_API
+gboolean gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad *pad);
+
+GST_VIDEO_API
+GstBuffer * gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad *pad);
+
+GST_VIDEO_API
+GstVideoFrame * gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad *pad);
+
+GST_VIDEO_API
+void gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad *pad, gboolean needs_alpha);
+
+/********************************
+ * GstVideoAggregatorConvertPad *
+ *******************************/
+
+#define GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD (gst_video_aggregator_convert_pad_get_type())
+#define GST_VIDEO_AGGREGATOR_CONVERT_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD, GstVideoAggregatorConvertPad))
+#define GST_VIDEO_AGGREGATOR_CONVERT_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD, GstVideoAggregatorConvertPadClass))
+#define GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD, GstVideoAggregatorConvertPadClass))
+#define GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD))
+#define GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD))
+
+typedef struct _GstVideoAggregatorConvertPad GstVideoAggregatorConvertPad;
+typedef struct _GstVideoAggregatorConvertPadClass GstVideoAggregatorConvertPadClass;
+typedef struct _GstVideoAggregatorConvertPadPrivate GstVideoAggregatorConvertPadPrivate;
+
+/**
+ * GstVideoAggregatorConvertPad:
+ *
+ * An implementation of GstPad that can be used with #GstVideoAggregator.
+ *
+ * See #GstVideoAggregator for more details.
+ *
+ * Since: 1.16
+ */
+struct _GstVideoAggregatorConvertPad
+{
+ /*< private >*/
+ GstVideoAggregatorPad parent;
+
+ GstVideoAggregatorConvertPadPrivate *priv;
+
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**
+ * GstVideoAggregatorConvertPadClass:
+ *
+ * Since: 1.16
+ */
+struct _GstVideoAggregatorConvertPadClass
+{
+ GstVideoAggregatorPadClass parent_class;
+
+ void (*create_conversion_info) (GstVideoAggregatorConvertPad *pad, GstVideoAggregator *agg, GstVideoInfo *conversion_info);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+GType gst_video_aggregator_convert_pad_get_type (void);
+
+GST_VIDEO_API
+void gst_video_aggregator_convert_pad_update_conversion_info (GstVideoAggregatorConvertPad * pad);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoAggregatorConvertPad, gst_object_unref)
+
+/****************************************
+ * GstVideoAggregatorParallelConvertPad *
+ ****************************************/
+
+#define GST_TYPE_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD (gst_video_aggregator_parallel_convert_pad_get_type())
+GST_VIDEO_API
+G_DECLARE_DERIVABLE_TYPE (GstVideoAggregatorParallelConvertPad, gst_video_aggregator_parallel_convert_pad, GST, VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD, GstVideoAggregatorConvertPad)
+
+#define GST_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD, GstVideoAggregatorParallelConvertPad))
+#define GST_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD, GstVideoAggregatorConvertPadClass))
+#define GST_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD, GstVideoAggregatorConvertPadClass))
+#define GST_IS_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD))
+#define GST_IS_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD))
+
+/**
+ * GstVideoAggregatorParallelConvertPad:
+ *
+ * An implementation of GstPad that can be used with #GstVideoAggregator.
+ *
+ * See #GstVideoAggregator for more details.
+ *
+ * Since: 1.20
+ */
+
+/**
+ * GstVideoAggregatorParallelConvertPadClass:
+ *
+ * Since: 1.20
+ */
+struct _GstVideoAggregatorParallelConvertPadClass
+{
+ GstVideoAggregatorConvertPadClass parent_class;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**********************
+ * GstVideoAggregator *
+ *********************/
+
+#define GST_TYPE_VIDEO_AGGREGATOR (gst_video_aggregator_get_type())
+#define GST_VIDEO_AGGREGATOR(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR, GstVideoAggregator))
+#define GST_VIDEO_AGGREGATOR_CAST(obj) ((GstVideoAggregator *)(obj))
+#define GST_VIDEO_AGGREGATOR_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_AGGREGATOR, GstVideoAggregatorClass))
+#define GST_IS_VIDEO_AGGREGATOR(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_AGGREGATOR))
+#define GST_IS_VIDEO_AGGREGATOR_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_AGGREGATOR))
+#define GST_VIDEO_AGGREGATOR_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_AGGREGATOR,GstVideoAggregatorClass))
+
+/**
+ * GstVideoAggregator:
+ * @info: The #GstVideoInfo representing the currently set
+ * srcpad caps.
+ *
+ * Since: 1.16
+ */
+struct _GstVideoAggregator
+{
+ GstAggregator aggregator;
+
+ /*< public >*/
+ /* Output caps */
+ GstVideoInfo info;
+
+ /* < private > */
+ GstVideoAggregatorPrivate *priv;
+ gpointer _gst_reserved[GST_PADDING_LARGE];
+};
+
+/**
+ * GstVideoAggregatorClass:
+ * @update_caps: Optional.
+ * Lets subclasses update the #GstCaps representing
+ * the src pad caps before usage. Return %NULL to indicate failure.
+ * @aggregate_frames: Lets subclasses aggregate frames that are ready. Subclasses
+ * should iterate the GstElement.sinkpads and use the already
+ * mapped #GstVideoFrame from gst_video_aggregator_pad_get_prepared_frame()
+ * or directly use the #GstBuffer from gst_video_aggregator_pad_get_current_buffer()
+ * if it needs to map the buffer in a special way. The result of the
+ * aggregation should land in @outbuffer.
+ * @create_output_buffer: Optional.
+ * Lets subclasses provide a #GstBuffer to be used as @outbuffer of
+ * the #aggregate_frames vmethod.
+ * @find_best_format: Optional.
+ * Lets subclasses decide of the best common format to use.
+ *
+ * Since: 1.16
+ **/
+struct _GstVideoAggregatorClass
+{
+ /*< private >*/
+ GstAggregatorClass parent_class;
+
+ /*< public >*/
+ GstCaps * (*update_caps) (GstVideoAggregator * videoaggregator,
+ GstCaps * caps);
+ GstFlowReturn (*aggregate_frames) (GstVideoAggregator * videoaggregator,
+ GstBuffer * outbuffer);
+ GstFlowReturn (*create_output_buffer) (GstVideoAggregator * videoaggregator,
+ GstBuffer ** outbuffer);
+ void (*find_best_format) (GstVideoAggregator * vagg,
+ GstCaps * downstream_caps,
+ GstVideoInfo * best_info,
+ gboolean * at_least_one_alpha);
+
+ /* < private > */
+ gpointer _gst_reserved[GST_PADDING_LARGE];
+};
+
+GST_VIDEO_API
+GType gst_video_aggregator_get_type (void);
+
+GST_VIDEO_API
+GstTaskPool * gst_video_aggregator_get_execution_task_pool (GstVideoAggregator * vagg);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoAggregator, gst_object_unref)
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoAggregatorPad, gst_object_unref)
+
+G_END_DECLS
+#endif /* __GST_VIDEO_AGGREGATOR_H__ */
diff --git a/include/gst/video/gstvideocodecalphameta.h b/include/gst/video/gstvideocodecalphameta.h
new file mode 100644
index 0000000000..14e371c393
--- /dev/null
+++ b/include/gst/video/gstvideocodecalphameta.h
@@ -0,0 +1,88 @@
+/* GStreamer
+ * Copyright (C) 2021 Collabora Ltd.
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_CODEC_ALPHA_META_H__
+#define __GST_VIDEO_CODEC_ALPHA_META_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GST_VIDEO_CODEC_ALPHA_META_API_TYPE:
+ *
+ * Since: 1.20
+ */
+#define GST_VIDEO_CODEC_ALPHA_META_API_TYPE (gst_video_codec_alpha_meta_api_get_type())
+
+/**
+ * GST_VIDEO_CODEC_ALPHA_META_INFO:
+ *
+ * Since: 1.20
+ */
+#define GST_VIDEO_CODEC_ALPHA_META_INFO (gst_video_codec_alpha_meta_get_info())
+
+typedef struct _GstVideoCodecAlphaMeta GstVideoCodecAlphaMeta;
+
+/**
+ * GstVideoCodecAlphaMeta:
+ * @meta: parent #GstMeta
+ * @buffer: the encoded alpha frame
+ *
+ * Encapsulate an extra frame containing the encoded alpha channel for the
+ * currently negotiated CODEC. The streams must be of the same dimension as
+ * the original one.
+ *
+ * Since: 1.20
+ */
+struct _GstVideoCodecAlphaMeta
+{
+ GstMeta meta;
+
+ GstBuffer *buffer;
+};
+
+GST_VIDEO_API
+GType gst_video_codec_alpha_meta_api_get_type (void);
+
+GST_VIDEO_API
+const GstMetaInfo *gst_video_codec_alpha_meta_get_info (void);
+
+/**
+ * gst_buffer_get_video_codec_alpha_meta:
+ * @b: A #GstBuffer pointer, must be writable.
+ *
+ * Helper macro to get #GstVideoCodecAlphaMeta from an existing #GstBuffer.
+ *
+ * Returns: (nullable): the #GstVideoCodecAlphaMeta pointer, or %NULL if none.
+ *
+ * Since: 1.20
+ */
+#define gst_buffer_get_video_codec_alpha_meta(b) \
+ ((GstVideoCodecAlphaMeta *)gst_buffer_get_meta((b),GST_VIDEO_CODEC_ALPHA_META_API_TYPE))
+
+GST_VIDEO_API
+GstVideoCodecAlphaMeta *gst_buffer_add_video_codec_alpha_meta (GstBuffer * buffer,
+ GstBuffer * alpha_buffer);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_CODEC_ALPHA_META_H__ */
diff --git a/include/gst/video/gstvideodecoder.h b/include/gst/video/gstvideodecoder.h
new file mode 100644
index 0000000000..a38287336a
--- /dev/null
+++ b/include/gst/video/gstvideodecoder.h
@@ -0,0 +1,558 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
+ * Copyright (C) 2011 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ * Copyright (C) 2012 Collabora Ltd.
+ * Author : Edward Hervey <edward@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_VIDEO_DECODER_H_
+#define _GST_VIDEO_DECODER_H_
+
+#include <gst/base/gstadapter.h>
+#include <gst/video/gstvideoutils.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VIDEO_DECODER \
+ (gst_video_decoder_get_type())
+#define GST_VIDEO_DECODER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_DECODER,GstVideoDecoder))
+#define GST_VIDEO_DECODER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_DECODER,GstVideoDecoderClass))
+#define GST_VIDEO_DECODER_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_DECODER,GstVideoDecoderClass))
+#define GST_IS_VIDEO_DECODER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_DECODER))
+#define GST_IS_VIDEO_DECODER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_DECODER))
+#define GST_VIDEO_DECODER_CAST(obj) ((GstVideoDecoder *)(obj))
+
+/**
+ * GST_VIDEO_DECODER_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+#define GST_VIDEO_DECODER_SINK_NAME "sink"
+/**
+ * GST_VIDEO_DECODER_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ */
+#define GST_VIDEO_DECODER_SRC_NAME "src"
+
+/**
+ * GST_VIDEO_DECODER_SRC_PAD:
+ * @obj: a #GstVideoDecoder
+ *
+ * Gives the pointer to the source #GstPad object of the element.
+ */
+#define GST_VIDEO_DECODER_SRC_PAD(obj) (((GstVideoDecoder *) (obj))->srcpad)
+
+/**
+ * GST_VIDEO_DECODER_SINK_PAD:
+ * @obj: a #GstVideoDecoder
+ *
+ * Gives the pointer to the sink #GstPad object of the element.
+ */
+#define GST_VIDEO_DECODER_SINK_PAD(obj) (((GstVideoDecoder *) (obj))->sinkpad)
+/**
+ * GST_VIDEO_DECODER_FLOW_NEED_DATA:
+ *
+ * Returned while parsing to indicate more data is needed.
+ **/
+#define GST_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
+
+/**
+ * GST_VIDEO_DECODER_INPUT_SEGMENT:
+ * @obj: base decoder instance
+ *
+ * Gives the segment of the element.
+ */
+#define GST_VIDEO_DECODER_INPUT_SEGMENT(obj) (GST_VIDEO_DECODER_CAST (obj)->input_segment)
+
+/**
+ * GST_VIDEO_DECODER_OUTPUT_SEGMENT:
+ * @obj: base decoder instance
+ *
+ * Gives the segment of the element.
+ */
+#define GST_VIDEO_DECODER_OUTPUT_SEGMENT(obj) (GST_VIDEO_DECODER_CAST (obj)->output_segment)
+
+/**
+ * GST_VIDEO_DECODER_STREAM_LOCK:
+ * @decoder: video decoder instance
+ *
+ * Obtain a lock to protect the decoder function from concurrent access.
+ */
+#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) g_rec_mutex_lock (&GST_VIDEO_DECODER (decoder)->stream_lock)
+
+/**
+ * GST_VIDEO_DECODER_STREAM_UNLOCK:
+ * @decoder: video decoder instance
+ *
+ * Release the lock that protects the decoder function from concurrent access.
+ */
+#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) g_rec_mutex_unlock (&GST_VIDEO_DECODER (decoder)->stream_lock)
+
+typedef struct _GstVideoDecoder GstVideoDecoder;
+typedef struct _GstVideoDecoderClass GstVideoDecoderClass;
+typedef struct _GstVideoDecoderPrivate GstVideoDecoderPrivate;
+
+
+/* do not use this one, use macro below */
+
+GST_VIDEO_API
+GstFlowReturn _gst_video_decoder_error (GstVideoDecoder *dec, gint weight,
+ GQuark domain, gint code,
+ gchar *txt, gchar *debug,
+ const gchar *file, const gchar *function,
+ gint line);
+
+/**
+ * GST_VIDEO_DECODER_ERROR:
+ * @el: the base video decoder element that generates the error
+ * @w: element defined weight of the error, added to error count
+ * @domain: like CORE, LIBRARY, RESOURCE or STREAM (see #gstreamer-GstGError)
+ * @code: error code defined for that domain (see #gstreamer-GstGError)
+ * @text: the message to display (format string and args enclosed in
+ * parentheses)
+ * @debug: debugging information for the message (format string and args
+ * enclosed in parentheses)
+ * @ret: variable to receive return value
+ *
+ * Utility function that video decoder elements can use in case they encountered
+ * a data processing error that may be fatal for the current "data unit" but
+ * need not prevent subsequent decoding. Such errors are counted and if there
+ * are too many, as configured in the context's max_errors, the pipeline will
+ * post an error message and the application will be requested to stop further
+ * media processing. Otherwise, it is considered a "glitch" and only a warning
+ * is logged. In either case, @ret is set to the proper value to
+ * return to upstream/caller (indicating either GST_FLOW_ERROR or GST_FLOW_OK).
+ */
+#define GST_VIDEO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \
+G_STMT_START { \
+ gchar *__txt = _gst_element_error_printf text; \
+ gchar *__dbg = _gst_element_error_printf debug; \
+ GstVideoDecoder *__dec = GST_VIDEO_DECODER (el); \
+ ret = _gst_video_decoder_error (__dec, w, GST_ ## domain ## _ERROR, \
+ GST_ ## domain ## _ERROR_ ## code, __txt, __dbg, __FILE__, \
+ GST_FUNCTION, __LINE__); \
+} G_STMT_END
+
+/**
+ * GST_VIDEO_DECODER_MAX_ERRORS:
+ *
+ * Default maximum number of errors tolerated before signaling error.
+ */
+#define GST_VIDEO_DECODER_MAX_ERRORS 10
+
+
+/**
+ * GstVideoDecoder:
+ *
+ * The opaque #GstVideoDecoder data structure.
+ */
+struct _GstVideoDecoder
+{
+  /*< private >*/
+  GstElement element;
+
+  /*< protected >*/
+  GstPad *sinkpad;   /* encoded-data input pad, named "sink" (see GST_VIDEO_DECODER_SINK_NAME) */
+  GstPad *srcpad;    /* decoded-video output pad, named "src" (see GST_VIDEO_DECODER_SRC_NAME) */
+
+  /* protects all data processing, i.e. is locked
+   * in the chain function, finish_frame and when
+   * processing serialized events */
+  GRecMutex stream_lock;
+
+  /* MT-protected (with STREAM_LOCK) */
+  GstSegment input_segment;
+  GstSegment output_segment;
+
+  GstVideoDecoderPrivate *priv;   /* opaque implementation state */
+
+  /*< private >*/
+  gpointer padding[GST_PADDING_LARGE];   /* reserved */
+};
+
+/**
+ * GstVideoDecoderClass:
+ * @open: Optional.
+ * Called when the element changes to GST_STATE_READY.
+ * Allows opening external resources.
+ * @close: Optional.
+ * Called when the element changes to GST_STATE_NULL.
+ * Allows closing external resources.
+ * @start: Optional.
+ * Called when the element starts processing.
+ * Allows opening external resources.
+ * @stop: Optional.
+ * Called when the element stops processing.
+ * Allows closing external resources.
+ * @set_format: Notifies subclass of incoming data format (caps).
+ * @parse: Required for non-packetized input.
+ * Allows chopping incoming data into manageable units (frames)
+ * for subsequent decoding.
+ * @reset: Optional.
+ * Allows subclass (decoder) to perform post-seek semantics reset.
+ * Deprecated.
+ * @handle_frame: Provides input data frame to subclass. In subframe mode, the subclass needs
+ * to take ownership of @GstVideoCodecFrame.input_buffer as it will be modified
+ * by the base class on the next subframe buffer receiving.
+ * @finish: Optional.
+ * Called to request subclass to dispatch any pending remaining
+ * data at EOS. Sub-classes can refuse to decode new data after.
+ * @drain: Optional.
+ * Called to request subclass to decode any data it can at this
+ * point, but that more data may arrive after. (e.g. at segment end).
+ * Sub-classes should be prepared to handle new data afterward,
+ * or seamless segment processing will break. Since: 1.6
+ * @sink_event: Optional.
+ * Event handler on the sink pad. This function should return
+ * TRUE if the event was handled and should be discarded
+ * (i.e. not unref'ed).
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @src_event: Optional.
+ * Event handler on the source pad. This function should return
+ * TRUE if the event was handled and should be discarded
+ * (i.e. not unref'ed).
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @negotiate: Optional.
+ * Negotiate with downstream and configure buffer pools, etc.
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @decide_allocation: Optional.
+ * Setup the allocation parameters for allocating output
+ * buffers. The passed in query contains the result of the
+ * downstream allocation query.
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @propose_allocation: Optional.
+ * Propose buffer allocation parameters for upstream elements.
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @flush: Optional.
+ * Flush all remaining data from the decoder without
+ * pushing it downstream. Since: 1.2
+ * @sink_query: Optional.
+ * Query handler on the sink pad. This function should
+ * return TRUE if the query could be performed. Subclasses
+ * should chain up to the parent implementation to invoke the
+ * default handler. Since: 1.4
+ * @src_query: Optional.
+ * Query handler on the source pad. This function should
+ * return TRUE if the query could be performed. Subclasses
+ * should chain up to the parent implementation to invoke the
+ * default handler. Since: 1.4
+ * @getcaps: Optional.
+ * Allows for a custom sink getcaps implementation.
+ * If not implemented, default returns
+ * gst_video_decoder_proxy_getcaps
+ * applied to sink template caps.
+ * @transform_meta: Optional. Transform the metadata on the input buffer to the
+ *                  output buffer. By default this method copies all meta without
+ *                  tags and meta with only the "video" tag. Subclasses can
+ * implement this method and return %TRUE if the metadata is to be
+ * copied. Since: 1.6
+ *
+ * Subclasses can override any of the available virtual methods or not, as
+ * needed. At minimum @handle_frame needs to be overridden, and likely
+ * @set_format as well. If non-packetized input is supported or expected,
+ * @parse needs to be overridden as well.
+ */
+struct _GstVideoDecoderClass
+{
+  /*< private >*/
+  GstElementClass element_class;
+
+  /*< public >*/
+  gboolean (*open) (GstVideoDecoder *decoder);   /* optional: READY transition, open resources */
+
+  gboolean (*close) (GstVideoDecoder *decoder);  /* optional: NULL transition, close resources */
+
+  gboolean (*start) (GstVideoDecoder *decoder);  /* optional: processing starts */
+
+  gboolean (*stop) (GstVideoDecoder *decoder);   /* optional: processing stops */
+
+  GstFlowReturn (*parse) (GstVideoDecoder *decoder,   /* required for non-packetized input */
+ GstVideoCodecFrame *frame,
+ GstAdapter *adapter,
+ gboolean at_eos);
+
+  gboolean (*set_format) (GstVideoDecoder *decoder,
+ GstVideoCodecState * state);
+
+  gboolean (*reset) (GstVideoDecoder *decoder,   /* deprecated */
+ gboolean hard);
+
+  GstFlowReturn (*finish) (GstVideoDecoder *decoder);
+
+  /**
+   * GstVideoDecoderClass::handle_frame:
+   * @decoder: The #GstVideoDecoder
+   * @frame: (transfer full): The frame to handle
+   */
+  GstFlowReturn (*handle_frame) (GstVideoDecoder *decoder,
+ GstVideoCodecFrame *frame);
+
+  gboolean (*sink_event) (GstVideoDecoder *decoder,
+ GstEvent *event);
+
+  gboolean (*src_event) (GstVideoDecoder *decoder,
+ GstEvent *event);
+
+  gboolean (*negotiate) (GstVideoDecoder *decoder);
+
+  gboolean (*decide_allocation) (GstVideoDecoder *decoder, GstQuery *query);
+
+  gboolean (*propose_allocation) (GstVideoDecoder *decoder, GstQuery * query);
+
+  gboolean (*flush) (GstVideoDecoder *decoder);
+
+  gboolean (*sink_query) (GstVideoDecoder *decoder,
+ GstQuery *query);
+
+  gboolean (*src_query) (GstVideoDecoder *decoder,
+ GstQuery *query);
+
+  GstCaps* (*getcaps) (GstVideoDecoder *decoder,
+ GstCaps *filter);
+
+  GstFlowReturn (*drain) (GstVideoDecoder *decoder);
+
+  gboolean (*transform_meta) (GstVideoDecoder *decoder,
+ GstVideoCodecFrame *frame,
+ GstMeta * meta);
+
+  /**
+   * GstVideoDecoderClass::handle_missing_data:
+   * @decoder: The #GstVideoDecoder
+   * @timestamp: Timestamp of the missing data
+   * @duration: Duration of the missing data
+   *
+   * Returns: %TRUE if the decoder should be drained afterwards.
+   *
+   * Since: 1.20
+   */
+  gboolean (*handle_missing_data) (GstVideoDecoder *decoder,
+ GstClockTime timestamp,
+ GstClockTime duration);
+
+  /*< private >*/
+  gpointer padding[GST_PADDING_LARGE-7];   /* reserved for future vfuncs */
+};
+
+/**
+ * GstVideoDecoderRequestSyncPointFlags:
+ * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT: discard all following
+ * input until the next sync point.
+ * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT: discard all following
+ * output until the next sync point.
+ *
+ * Flags to be used in combination with gst_video_decoder_request_sync_point().
+ * See the function documentation for more details.
+ *
+ * Since: 1.20
+ */
+typedef enum {
+  GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT = (1<<0),   /* discard input until the next sync point */
+  GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT = (1<<1),  /* discard output until the next sync point */
+} GstVideoDecoderRequestSyncPointFlags;
+
+GST_VIDEO_API
+GType gst_video_decoder_get_type (void);
+
+/* Context parameters */
+
+GST_VIDEO_API
+void gst_video_decoder_set_packetized (GstVideoDecoder * decoder,
+ gboolean packetized);
+
+GST_VIDEO_API
+gboolean gst_video_decoder_get_packetized (GstVideoDecoder * decoder);
+
+GST_VIDEO_API
+void gst_video_decoder_set_subframe_mode (GstVideoDecoder * decoder,
+ gboolean subframe_mode);
+
+GST_VIDEO_API
+gboolean gst_video_decoder_get_subframe_mode (GstVideoDecoder * decoder);
+
+GST_VIDEO_API
+guint gst_video_decoder_get_input_subframe_index (GstVideoDecoder * decoder, GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+guint gst_video_decoder_get_processed_subframe_index (GstVideoDecoder * decoder, GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+void gst_video_decoder_set_estimate_rate (GstVideoDecoder * dec,
+ gboolean enabled);
+
+GST_VIDEO_API
+gint gst_video_decoder_get_estimate_rate (GstVideoDecoder * dec);
+
+GST_VIDEO_API
+void gst_video_decoder_set_max_errors (GstVideoDecoder * dec,
+ gint num);
+
+GST_VIDEO_API
+gint gst_video_decoder_get_max_errors (GstVideoDecoder * dec);
+
+GST_VIDEO_API
+void gst_video_decoder_set_needs_format (GstVideoDecoder * dec,
+ gboolean enabled);
+
+GST_VIDEO_API
+gboolean gst_video_decoder_get_needs_format (GstVideoDecoder * dec);
+
+GST_VIDEO_API
+void gst_video_decoder_set_needs_sync_point (GstVideoDecoder * dec,
+ gboolean enabled);
+
+GST_VIDEO_API
+gboolean gst_video_decoder_get_needs_sync_point (GstVideoDecoder * dec);
+
+GST_VIDEO_API
+void gst_video_decoder_set_latency (GstVideoDecoder *decoder,
+ GstClockTime min_latency,
+ GstClockTime max_latency);
+
+GST_VIDEO_API
+void gst_video_decoder_get_latency (GstVideoDecoder *decoder,
+ GstClockTime *min_latency,
+ GstClockTime *max_latency);
+
+GST_VIDEO_API
+void gst_video_decoder_get_allocator (GstVideoDecoder *decoder,
+ GstAllocator **allocator,
+ GstAllocationParams *params);
+
+GST_VIDEO_API
+GstBufferPool *gst_video_decoder_get_buffer_pool (GstVideoDecoder *decoder);
+
+/* Object methods */
+
+GST_VIDEO_API
+GstVideoCodecFrame *gst_video_decoder_get_frame (GstVideoDecoder *decoder,
+ int frame_number);
+
+GST_VIDEO_API
+GstVideoCodecFrame *gst_video_decoder_get_oldest_frame (GstVideoDecoder *decoder);
+
+GST_VIDEO_API
+GList * gst_video_decoder_get_frames (GstVideoDecoder *decoder);
+
+/* Parsing related methods */
+
+GST_VIDEO_API
+void gst_video_decoder_add_to_frame (GstVideoDecoder *decoder,
+ int n_bytes);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_have_frame (GstVideoDecoder *decoder);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_have_last_subframe (GstVideoDecoder *decoder,
+ GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+gsize gst_video_decoder_get_pending_frame_size (GstVideoDecoder *decoder);
+
+GST_VIDEO_API
+GstBuffer *gst_video_decoder_allocate_output_buffer (GstVideoDecoder * decoder);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_allocate_output_frame_with_params (GstVideoDecoder *decoder,
+ GstVideoCodecFrame * frame,
+ GstBufferPoolAcquireParams *params);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_allocate_output_frame (GstVideoDecoder *decoder,
+ GstVideoCodecFrame *frame);
+
+GST_VIDEO_API
+GstVideoCodecState *gst_video_decoder_set_output_state (GstVideoDecoder *decoder,
+ GstVideoFormat fmt, guint width, guint height,
+ GstVideoCodecState *reference);
+
+GST_VIDEO_API
+GstVideoCodecState *gst_video_decoder_set_interlaced_output_state (GstVideoDecoder *decoder,
+ GstVideoFormat fmt, GstVideoInterlaceMode interlace_mode,
+ guint width, guint height, GstVideoCodecState *reference);
+
+GST_VIDEO_API
+GstVideoCodecState *gst_video_decoder_get_output_state (GstVideoDecoder *decoder);
+
+GST_VIDEO_API
+gboolean gst_video_decoder_negotiate (GstVideoDecoder * decoder);
+
+GST_VIDEO_API
+GstClockTimeDiff gst_video_decoder_get_max_decode_time (GstVideoDecoder *decoder,
+ GstVideoCodecFrame *frame);
+
+GST_VIDEO_API
+gdouble gst_video_decoder_get_qos_proportion (GstVideoDecoder * decoder);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_finish_frame (GstVideoDecoder *decoder,
+ GstVideoCodecFrame *frame);
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_finish_subframe (GstVideoDecoder *decoder,
+ GstVideoCodecFrame *frame);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_drop_frame (GstVideoDecoder *dec,
+ GstVideoCodecFrame *frame);
+GST_VIDEO_API
+GstFlowReturn gst_video_decoder_drop_subframe (GstVideoDecoder *dec,
+ GstVideoCodecFrame *frame);
+
+GST_VIDEO_API
+void gst_video_decoder_request_sync_point (GstVideoDecoder *dec,
+ GstVideoCodecFrame *frame,
+ GstVideoDecoderRequestSyncPointFlags flags);
+
+GST_VIDEO_API
+void gst_video_decoder_release_frame (GstVideoDecoder * dec,
+ GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+void gst_video_decoder_merge_tags (GstVideoDecoder *decoder,
+ const GstTagList *tags,
+ GstTagMergeMode mode);
+
+GST_VIDEO_API
+GstCaps * gst_video_decoder_proxy_getcaps (GstVideoDecoder * decoder,
+ GstCaps * caps,
+ GstCaps * filter);
+
+GST_VIDEO_API
+void gst_video_decoder_set_use_default_pad_acceptcaps (GstVideoDecoder * decoder,
+ gboolean use);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoDecoder, gst_object_unref)
+
+G_END_DECLS
+
+#endif
+
diff --git a/include/gst/video/gstvideoencoder.h b/include/gst/video/gstvideoencoder.h
new file mode 100644
index 0000000000..2a03fdda20
--- /dev/null
+++ b/include/gst/video/gstvideoencoder.h
@@ -0,0 +1,395 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
+ * Copyright (C) 2011 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ * Copyright (C) 2012 Collabora Ltd.
+ * Author : Edward Hervey <edward@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_VIDEO_ENCODER_H_
+#define _GST_VIDEO_ENCODER_H_
+
+#include <gst/video/gstvideoutils.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VIDEO_ENCODER \
+ (gst_video_encoder_get_type())
+#define GST_VIDEO_ENCODER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoder))
+#define GST_VIDEO_ENCODER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass))
+#define GST_VIDEO_ENCODER_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass))
+#define GST_IS_VIDEO_ENCODER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_ENCODER))
+#define GST_IS_VIDEO_ENCODER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_ENCODER))
+#define GST_VIDEO_ENCODER_CAST(enc) ((GstVideoEncoder*)enc)
+
+/**
+ * GST_VIDEO_ENCODER_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+#define GST_VIDEO_ENCODER_SINK_NAME "sink"
+/**
+ * GST_VIDEO_ENCODER_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ */
+#define GST_VIDEO_ENCODER_SRC_NAME "src"
+
+/**
+ * GST_VIDEO_ENCODER_SRC_PAD:
+ * @obj: a #GstVideoEncoder
+ *
+ * Gives the pointer to the source #GstPad object of the element.
+ */
+#define GST_VIDEO_ENCODER_SRC_PAD(obj) (((GstVideoEncoder *) (obj))->srcpad)
+
+/**
+ * GST_VIDEO_ENCODER_SINK_PAD:
+ * @obj: a #GstVideoEncoder
+ *
+ * Gives the pointer to the sink #GstPad object of the element.
+ */
+#define GST_VIDEO_ENCODER_SINK_PAD(obj) (((GstVideoEncoder *) (obj))->sinkpad)
+
+/**
+ * GST_VIDEO_ENCODER_FLOW_NEED_DATA:
+ *
+ * Returned while parsing to indicate more data is needed.
+ **/
+#define GST_VIDEO_ENCODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
+
+/**
+ * GST_VIDEO_ENCODER_FLOW_DROPPED:
+ *
+ * Returned when the event/buffer should be dropped.
+ *
+ * Deprecated: since 1.8. use gst_video_encoder_finish_frame with
+ * a %NULL frame->output_buffer to drop the frame instead.
+ */
+#ifndef GST_DISABLE_DEPRECATED
+#define GST_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
+#endif
+
+/**
+ * GST_VIDEO_ENCODER_INPUT_SEGMENT:
+ * @obj: base parse instance
+ *
+ * Gives the segment of the element.
+ */
+#define GST_VIDEO_ENCODER_INPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->input_segment)
+
+/**
+ * GST_VIDEO_ENCODER_OUTPUT_SEGMENT:
+ * @obj: base parse instance
+ *
+ * Gives the segment of the element.
+ */
+#define GST_VIDEO_ENCODER_OUTPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->output_segment)
+
+/**
+ * GST_VIDEO_ENCODER_STREAM_LOCK:
+ * @encoder: video encoder instance
+ *
+ * Obtain a lock to protect the encoder function from concurrent access.
+ */
+#define GST_VIDEO_ENCODER_STREAM_LOCK(encoder) g_rec_mutex_lock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
+
+/**
+ * GST_VIDEO_ENCODER_STREAM_UNLOCK:
+ * @encoder: video encoder instance
+ *
+ * Release the lock that protects the encoder function from concurrent access.
+ */
+#define GST_VIDEO_ENCODER_STREAM_UNLOCK(encoder) g_rec_mutex_unlock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
+
+typedef struct _GstVideoEncoder GstVideoEncoder;
+typedef struct _GstVideoEncoderPrivate GstVideoEncoderPrivate;
+typedef struct _GstVideoEncoderClass GstVideoEncoderClass;
+
+/**
+ * GstVideoEncoder:
+ *
+ * The opaque #GstVideoEncoder data structure.
+ */
+struct _GstVideoEncoder
+{
+  /*< private >*/
+  GstElement element;
+
+  /*< protected >*/
+  GstPad *sinkpad;   /* raw-video input pad, named "sink" (see GST_VIDEO_ENCODER_SINK_NAME) */
+  GstPad *srcpad;    /* encoded output pad, named "src" (see GST_VIDEO_ENCODER_SRC_NAME) */
+
+  /* protects all data processing, i.e. is locked
+   * in the chain function, finish_frame and when
+   * processing serialized events */
+  GRecMutex stream_lock;
+
+  /* MT-protected (with STREAM_LOCK) */
+  GstSegment input_segment;
+  GstSegment output_segment;
+
+  /*< private >*/
+  GstVideoEncoderPrivate *priv;   /* opaque implementation state */
+
+  gpointer padding[GST_PADDING_LARGE];   /* reserved */
+};
+
+/**
+ * GstVideoEncoderClass:
+ * @open: Optional.
+ * Called when the element changes to GST_STATE_READY.
+ * Allows opening external resources.
+ * @close: Optional.
+ * Called when the element changes to GST_STATE_NULL.
+ * Allows closing external resources.
+ * @start: Optional.
+ * Called when the element starts processing.
+ * Allows opening external resources.
+ * @stop: Optional.
+ * Called when the element stops processing.
+ * Allows closing external resources.
+ * @set_format: Optional.
+ * Notifies subclass of incoming data format.
+ * GstVideoCodecState fields have already been
+ * set according to provided caps.
+ * @handle_frame: Provides input frame to subclass.
+ * @reset: Optional.
+ * Allows subclass (encoder) to perform post-seek semantics reset.
+ * Deprecated.
+ * @finish: Optional.
+ * Called to request subclass to dispatch any pending remaining
+ * data (e.g. at EOS).
+ * @pre_push: Optional.
+ * Allows subclass to push frame downstream in whatever
+ * shape or form it deems appropriate. If not provided,
+ * provided encoded frame data is simply pushed downstream.
+ * @getcaps: Optional.
+ * Allows for a custom sink getcaps implementation (e.g.
+ * for multichannel input specification). If not implemented,
+ * default returns gst_video_encoder_proxy_getcaps
+ * applied to sink template caps.
+ * @sink_event: Optional.
+ * Event handler on the sink pad. This function should return
+ * TRUE if the event was handled and should be discarded
+ * (i.e. not unref'ed).
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @src_event: Optional.
+ * Event handler on the source pad. This function should return
+ * TRUE if the event was handled and should be discarded
+ * (i.e. not unref'ed).
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @negotiate: Optional.
+ * Negotiate with downstream and configure buffer pools, etc.
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @decide_allocation: Optional.
+ * Setup the allocation parameters for allocating output
+ * buffers. The passed in query contains the result of the
+ * downstream allocation query.
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @propose_allocation: Optional.
+ * Propose buffer allocation parameters for upstream elements.
+ * Subclasses should chain up to the parent implementation to
+ * invoke the default handler.
+ * @flush: Optional.
+ * Flush all remaining data from the encoder without
+ * pushing it downstream. Since: 1.2
+ * @sink_query: Optional.
+ * Query handler on the sink pad. This function should
+ * return TRUE if the query could be performed. Subclasses
+ * should chain up to the parent implementation to invoke the
+ * default handler. Since: 1.4
+ * @src_query: Optional.
+ * Query handler on the source pad. This function should
+ * return TRUE if the query could be performed. Subclasses
+ * should chain up to the parent implementation to invoke the
+ * default handler. Since: 1.4
+ * @transform_meta: Optional. Transform the metadata on the input buffer to the
+ *                  output buffer. By default this method copies all meta without
+ *                  tags and meta with only the "video" tag. Subclasses can
+ * implement this method and return %TRUE if the metadata is to be
+ * copied. Since: 1.6
+ *
+ * Subclasses can override any of the available virtual methods or not, as
+ * needed. At minimum @handle_frame needs to be overridden, and @set_format
+ * and @getcaps are likely needed as well.
+ */
+struct _GstVideoEncoderClass
+{
+  /*< private >*/
+  GstElementClass element_class;
+
+  /*< public >*/
+  /* virtual methods for subclasses */
+  gboolean (*open) (GstVideoEncoder *encoder);   /* optional: READY transition, open resources */
+
+  gboolean (*close) (GstVideoEncoder *encoder);  /* optional: NULL transition, close resources */
+
+  gboolean (*start) (GstVideoEncoder *encoder);  /* optional: processing starts */
+
+  gboolean (*stop) (GstVideoEncoder *encoder);   /* optional: processing stops */
+
+  gboolean (*set_format) (GstVideoEncoder *encoder,
+ GstVideoCodecState *state);
+
+  GstFlowReturn (*handle_frame) (GstVideoEncoder *encoder,
+ GstVideoCodecFrame *frame);
+
+  gboolean (*reset) (GstVideoEncoder *encoder,   /* deprecated */
+ gboolean hard);
+
+  GstFlowReturn (*finish) (GstVideoEncoder *encoder);
+
+  GstFlowReturn (*pre_push) (GstVideoEncoder *encoder,
+ GstVideoCodecFrame *frame);
+
+  GstCaps * (*getcaps) (GstVideoEncoder *enc,
+ GstCaps *filter);
+
+  gboolean (*sink_event) (GstVideoEncoder *encoder,
+ GstEvent *event);
+
+  gboolean (*src_event) (GstVideoEncoder *encoder,
+ GstEvent *event);
+
+  gboolean (*negotiate) (GstVideoEncoder *encoder);
+
+  gboolean (*decide_allocation) (GstVideoEncoder *encoder, GstQuery *query);
+
+  gboolean (*propose_allocation) (GstVideoEncoder * encoder,
+ GstQuery * query);
+  gboolean (*flush) (GstVideoEncoder *encoder);
+
+  gboolean (*sink_query) (GstVideoEncoder *encoder,
+ GstQuery *query);
+
+  gboolean (*src_query) (GstVideoEncoder *encoder,
+ GstQuery *query);
+
+  gboolean (*transform_meta) (GstVideoEncoder *encoder,
+ GstVideoCodecFrame *frame,
+ GstMeta * meta);
+
+  /*< private >*/
+  gpointer _gst_reserved[GST_PADDING_LARGE-4];   /* reserved for future vfuncs */
+};
+
+GST_VIDEO_API
+GType gst_video_encoder_get_type (void);
+
+GST_VIDEO_API
+GstVideoCodecState* gst_video_encoder_get_output_state (GstVideoEncoder *encoder);
+
+GST_VIDEO_API
+GstVideoCodecState* gst_video_encoder_set_output_state (GstVideoEncoder * encoder,
+ GstCaps * caps,
+ GstVideoCodecState * reference);
+
+GST_VIDEO_API
+gboolean gst_video_encoder_negotiate (GstVideoEncoder * encoder);
+
+GST_VIDEO_API
+GstVideoCodecFrame* gst_video_encoder_get_frame (GstVideoEncoder *encoder,
+ int frame_number);
+
+GST_VIDEO_API
+GstVideoCodecFrame* gst_video_encoder_get_oldest_frame (GstVideoEncoder *encoder);
+
+GST_VIDEO_API
+GList * gst_video_encoder_get_frames (GstVideoEncoder *encoder);
+
+GST_VIDEO_API
+GstBuffer * gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder,
+ gsize size);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_encoder_allocate_output_frame (GstVideoEncoder *encoder,
+ GstVideoCodecFrame *frame,
+ gsize size);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder,
+ GstVideoCodecFrame *frame);
+
+GST_VIDEO_API
+GstFlowReturn gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+GstCaps * gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc,
+ GstCaps * caps,
+ GstCaps * filter);
+
+GST_VIDEO_API
+void gst_video_encoder_set_latency (GstVideoEncoder *encoder,
+ GstClockTime min_latency,
+ GstClockTime max_latency);
+
+GST_VIDEO_API
+void gst_video_encoder_get_latency (GstVideoEncoder *encoder,
+ GstClockTime *min_latency,
+ GstClockTime *max_latency);
+
+GST_VIDEO_API
+void gst_video_encoder_set_headers (GstVideoEncoder *encoder,
+ GList *headers);
+
+GST_VIDEO_API
+void gst_video_encoder_merge_tags (GstVideoEncoder *encoder,
+ const GstTagList *tags,
+ GstTagMergeMode mode);
+
+GST_VIDEO_API
+void gst_video_encoder_get_allocator (GstVideoEncoder *encoder,
+ GstAllocator **allocator,
+ GstAllocationParams *params);
+
+GST_VIDEO_API
+void gst_video_encoder_set_min_pts(GstVideoEncoder *encoder, GstClockTime min_pts);
+
+GST_VIDEO_API
+void gst_video_encoder_set_qos_enabled (GstVideoEncoder * encoder, gboolean enabled);
+
+GST_VIDEO_API
+gboolean gst_video_encoder_is_qos_enabled (GstVideoEncoder * encoder);
+
+GST_VIDEO_API
+GstClockTimeDiff gst_video_encoder_get_max_encode_time (GstVideoEncoder *encoder, GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+void gst_video_encoder_set_min_force_key_unit_interval (GstVideoEncoder * encoder,
+ GstClockTime interval);
+GST_VIDEO_API
+GstClockTime gst_video_encoder_get_min_force_key_unit_interval (GstVideoEncoder * encoder);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoEncoder, gst_object_unref)
+
+G_END_DECLS
+
+#endif
+
diff --git a/include/gst/video/gstvideofilter.h b/include/gst/video/gstvideofilter.h
new file mode 100644
index 0000000000..869728678e
--- /dev/null
+++ b/include/gst/video/gstvideofilter.h
@@ -0,0 +1,89 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_VIDEO_FILTER_H__
+#define __GST_VIDEO_FILTER_H__
+
+#include <gst/base/gstbasetransform.h>
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoFilter GstVideoFilter;
+typedef struct _GstVideoFilterClass GstVideoFilterClass;
+
+#define GST_TYPE_VIDEO_FILTER \
+ (gst_video_filter_get_type())
+#define GST_VIDEO_FILTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_FILTER,GstVideoFilter))
+#define GST_VIDEO_FILTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_FILTER,GstVideoFilterClass))
+#define GST_VIDEO_FILTER_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_VIDEO_FILTER, GstVideoFilterClass))
+#define GST_IS_VIDEO_FILTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_FILTER))
+#define GST_IS_VIDEO_FILTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_FILTER))
+#define GST_VIDEO_FILTER_CAST(obj) ((GstVideoFilter *)(obj))
+
+struct _GstVideoFilter {
+ GstBaseTransform element;
+
+ gboolean negotiated;
+ GstVideoInfo in_info;
+ GstVideoInfo out_info;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**
+ * GstVideoFilterClass:
+ * @parent_class: the parent class structure
+ * @set_info: function to be called with the negotiated caps and video infos
+ * @transform_frame: transform a video frame
+ * @transform_frame_ip: transform a video frame in place
+ *
+ * The video filter class structure.
+ */
+struct _GstVideoFilterClass {
+ GstBaseTransformClass parent_class;
+
+ gboolean (*set_info) (GstVideoFilter *filter,
+ GstCaps *incaps, GstVideoInfo *in_info,
+ GstCaps *outcaps, GstVideoInfo *out_info);
+
+ /* transform */
+ GstFlowReturn (*transform_frame) (GstVideoFilter *filter,
+ GstVideoFrame *inframe, GstVideoFrame *outframe);
+ GstFlowReturn (*transform_frame_ip) (GstVideoFilter *trans, GstVideoFrame *frame);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+GType gst_video_filter_get_type (void);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoFilter, gst_object_unref)
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_FILTER_H__ */
diff --git a/include/gst/video/gstvideometa.h b/include/gst/video/gstvideometa.h
new file mode 100644
index 0000000000..8c8436b68c
--- /dev/null
+++ b/include/gst/video/gstvideometa.h
@@ -0,0 +1,418 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_META_H__
+#define __GST_VIDEO_META_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideotimecode.h>
+
+G_BEGIN_DECLS
+
+#define GST_VIDEO_META_API_TYPE (gst_video_meta_api_get_type())
+#define GST_VIDEO_META_INFO (gst_video_meta_get_info())
+typedef struct _GstVideoMeta GstVideoMeta;
+
+#define GST_CAPS_FEATURE_META_GST_VIDEO_META "meta:GstVideoMeta"
+
+#define GST_VIDEO_CROP_META_API_TYPE (gst_video_crop_meta_api_get_type())
+#define GST_VIDEO_CROP_META_INFO (gst_video_crop_meta_get_info())
+typedef struct _GstVideoCropMeta GstVideoCropMeta;
+
+/**
+ * GstVideoMeta:
+ * @meta: parent #GstMeta
+ * @buffer: the buffer this metadata belongs to
+ * @flags: additional video flags
+ * @format: the video format
+ * @id: identifier of the frame
+ * @width: the video width
+ * @height: the video height
+ * @n_planes: the number of planes in the image
+ * @offset: array of offsets for the planes. This field might not always be
+ * valid, it is used by the default implementation of @map.
+ * @stride: array of strides for the planes. This field might not always be
+ * valid, it is used by the default implementation of @map.
+ * @map: map the memory of a plane
+ * @unmap: unmap the memory of a plane
+ * @alignment: the paddings and alignment constraints of the video buffer.
+ * It is up to the caller of `gst_buffer_add_video_meta_full()` to set it
+ * using gst_video_meta_set_alignment(), if they did not it defaults
+ * to no padding and no alignment. Since: 1.18
+ *
+ * Extra buffer metadata describing image properties
+ *
+ * This meta can also be used by downstream elements to specify their
+ * buffer layout requirements for upstream. Upstream should try to
+ * fit those requirements, if possible, in order to prevent buffer copies.
+ *
+ * This is done by passing a custom #GstStructure to
+ * gst_query_add_allocation_meta() when handling the ALLOCATION query.
+ * This structure should be named 'video-meta' and can have the following
+ * fields:
+ * - padding-top (uint): extra pixels on the top
+ * - padding-bottom (uint): extra pixels on the bottom
+ * - padding-left (uint): extra pixels on the left side
+ * - padding-right (uint): extra pixels on the right side
+ * The padding fields have the same semantic as #GstVideoMeta.alignment
+ * and so represent the paddings requested on produced video buffers.
+ */
+struct _GstVideoMeta {
+ GstMeta meta;
+
+ GstBuffer *buffer;
+
+ GstVideoFrameFlags flags;
+ GstVideoFormat format;
+ gint id;
+ guint width;
+ guint height;
+
+ guint n_planes;
+ gsize offset[GST_VIDEO_MAX_PLANES];
+ gint stride[GST_VIDEO_MAX_PLANES];
+
+ gboolean (*map) (GstVideoMeta *meta, guint plane, GstMapInfo *info,
+ gpointer *data, gint * stride, GstMapFlags flags);
+ gboolean (*unmap) (GstVideoMeta *meta, guint plane, GstMapInfo *info);
+
+ GstVideoAlignment alignment;
+};
+
+GST_VIDEO_API
+GType gst_video_meta_api_get_type (void);
+
+GST_VIDEO_API
+const GstMetaInfo * gst_video_meta_get_info (void);
+
+GST_VIDEO_API
+GstVideoMeta * gst_buffer_get_video_meta (GstBuffer *buffer);
+
+GST_VIDEO_API
+GstVideoMeta * gst_buffer_get_video_meta_id (GstBuffer *buffer, gint id);
+
+GST_VIDEO_API
+GstVideoMeta * gst_buffer_add_video_meta (GstBuffer *buffer, GstVideoFrameFlags flags,
+ GstVideoFormat format, guint width, guint height);
+
+GST_VIDEO_API
+GstVideoMeta * gst_buffer_add_video_meta_full (GstBuffer *buffer, GstVideoFrameFlags flags,
+ GstVideoFormat format, guint width, guint height,
+ guint n_planes, gsize offset[GST_VIDEO_MAX_PLANES],
+ gint stride[GST_VIDEO_MAX_PLANES]);
+
+GST_VIDEO_API
+gboolean gst_video_meta_map (GstVideoMeta *meta, guint plane, GstMapInfo *info,
+ gpointer *data, gint *stride, GstMapFlags flags);
+
+GST_VIDEO_API
+gboolean gst_video_meta_unmap (GstVideoMeta *meta, guint plane, GstMapInfo *info);
+
+GST_VIDEO_API
+gboolean gst_video_meta_set_alignment (GstVideoMeta * meta, GstVideoAlignment alignment);
+
+GST_VIDEO_API
+gboolean gst_video_meta_get_plane_size (GstVideoMeta * meta, gsize plane_size[GST_VIDEO_MAX_PLANES]);
+
+GST_VIDEO_API
+gboolean gst_video_meta_get_plane_height (GstVideoMeta * meta, guint plane_height[GST_VIDEO_MAX_PLANES]);
+
+/**
+ * GstVideoCropMeta:
+ * @meta: parent #GstMeta
+ * @x: the horizontal offset
+ * @y: the vertical offset
+ * @width: the cropped width
+ * @height: the cropped height
+ *
+ * Extra buffer metadata describing image cropping.
+ */
+struct _GstVideoCropMeta {
+ GstMeta meta;
+
+ guint x;
+ guint y;
+ guint width;
+ guint height;
+};
+
+GST_VIDEO_API
+GType gst_video_crop_meta_api_get_type (void);
+
+GST_VIDEO_API
+const GstMetaInfo * gst_video_crop_meta_get_info (void);
+
+#define gst_buffer_get_video_crop_meta(b) ((GstVideoCropMeta*)gst_buffer_get_meta((b),GST_VIDEO_CROP_META_API_TYPE))
+#define gst_buffer_add_video_crop_meta(b) ((GstVideoCropMeta*)gst_buffer_add_meta((b),GST_VIDEO_CROP_META_INFO, NULL))
+
+/* video metadata transforms */
+
+GST_VIDEO_API
+GQuark gst_video_meta_transform_scale_get_quark (void);
+/**
+ * gst_video_meta_transform_scale:
+ *
+ * GQuark for the video "gst-video-scale" transform.
+ */
+#define GST_VIDEO_META_TRANSFORM_IS_SCALE(type) ((type) == gst_video_meta_transform_scale_get_quark())
+
+/**
+ * GstVideoMetaTransform:
+ * @in_info: the input #GstVideoInfo
+ * @out_info: the output #GstVideoInfo
+ *
+ * Extra data passed to a video transform #GstMetaTransformFunction such as:
+ * "gst-video-scale".
+ */
+typedef struct {
+ GstVideoInfo *in_info;
+ GstVideoInfo *out_info;
+} GstVideoMetaTransform;
+
+/**
+ * GstVideoGLTextureType:
+ * @GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE: Luminance texture, GL_LUMINANCE
+ * @GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA: Luminance-alpha texture, GL_LUMINANCE_ALPHA
+ * @GST_VIDEO_GL_TEXTURE_TYPE_RGB16: RGB 565 texture, GL_RGB
+ * @GST_VIDEO_GL_TEXTURE_TYPE_RGB: RGB texture, GL_RGB
+ * @GST_VIDEO_GL_TEXTURE_TYPE_RGBA: RGBA texture, GL_RGBA
+ * @GST_VIDEO_GL_TEXTURE_TYPE_R: R texture, GL_RED_EXT
+ * @GST_VIDEO_GL_TEXTURE_TYPE_RG: RG texture, GL_RG_EXT
+ *
+ * The GL texture type.
+ */
+typedef enum
+{
+ GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE,
+ GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA,
+ GST_VIDEO_GL_TEXTURE_TYPE_RGB16,
+ GST_VIDEO_GL_TEXTURE_TYPE_RGB,
+ GST_VIDEO_GL_TEXTURE_TYPE_RGBA,
+ GST_VIDEO_GL_TEXTURE_TYPE_R,
+ GST_VIDEO_GL_TEXTURE_TYPE_RG
+} GstVideoGLTextureType;
+
+/**
+ * GstVideoGLTextureOrientation:
+ * @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL: Top line first in memory, left row first
+ * @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_FLIP: Bottom line first in memory, left row first
+ * @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_NORMAL: Top line first in memory, right row first
+ * @GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_FLIP: Bottom line first in memory, right row first
+ *
+ * The orientation of the GL texture.
+ */
+typedef enum
+{
+ GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL,
+ GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_FLIP,
+ GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_NORMAL,
+ GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_FLIP
+} GstVideoGLTextureOrientation;
+
+#define GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE (gst_video_gl_texture_upload_meta_api_get_type())
+#define GST_VIDEO_GL_TEXTURE_UPLOAD_META_INFO (gst_video_gl_texture_upload_meta_get_info())
+
+typedef struct _GstVideoGLTextureUploadMeta GstVideoGLTextureUploadMeta;
+typedef gboolean (*GstVideoGLTextureUpload) (GstVideoGLTextureUploadMeta *meta, guint texture_id[4]);
+
+#define GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META "meta:GstVideoGLTextureUploadMeta"
+
+/**
+ * GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META:
+ *
+ * An option that can be activated on a bufferpool to request gl texture upload
+ * meta on buffers from the pool.
+ *
+ * When this option is enabled on the bufferpool,
+ * @GST_BUFFER_POOL_OPTION_VIDEO_META should also be enabled.
+ *
+ * Since: 1.2.2
+ */
+#define GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META "GstBufferPoolOptionVideoGLTextureUploadMeta"
+
+/**
+ * GstVideoGLTextureUploadMeta:
+ * @meta: parent #GstMeta
+ * @texture_orientation: Orientation of the textures
+ * @n_textures: Number of textures that are generated
+ * @texture_type: Type of each texture
+ *
+ * Extra buffer metadata for uploading a buffer to an OpenGL texture
+ * ID. The caller of gst_video_gl_texture_upload_meta_upload() must
+ * have OpenGL set up and call this from a thread where it is valid
+ * to upload something to an OpenGL texture.
+ */
+
+struct _GstVideoGLTextureUploadMeta {
+ GstMeta meta;
+
+ GstVideoGLTextureOrientation texture_orientation;
+ guint n_textures;
+ GstVideoGLTextureType texture_type[4];
+
+ /* <private> */
+ GstBuffer *buffer;
+ GstVideoGLTextureUpload upload;
+
+ gpointer user_data;
+ GBoxedCopyFunc user_data_copy;
+ GBoxedFreeFunc user_data_free;
+};
+
+GST_VIDEO_API
+GType gst_video_gl_texture_upload_meta_api_get_type (void);
+
+GST_VIDEO_API
+const GstMetaInfo * gst_video_gl_texture_upload_meta_get_info (void);
+
+#define gst_buffer_get_video_gl_texture_upload_meta(b) ((GstVideoGLTextureUploadMeta*)gst_buffer_get_meta((b),GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE))
+
+GST_VIDEO_API
+GstVideoGLTextureUploadMeta *
+ gst_buffer_add_video_gl_texture_upload_meta (GstBuffer *buffer,
+ GstVideoGLTextureOrientation texture_orientation,
+ guint n_textures,
+ GstVideoGLTextureType texture_type[4],
+ GstVideoGLTextureUpload upload,
+ gpointer user_data,
+ GBoxedCopyFunc user_data_copy,
+ GBoxedFreeFunc user_data_free);
+
+GST_VIDEO_API
+gboolean gst_video_gl_texture_upload_meta_upload (GstVideoGLTextureUploadMeta *meta,
+ guint texture_id[4]);
+
+
+/**
+ * GstVideoRegionOfInterestMeta:
+ * @meta: parent #GstMeta
+ * @roi_type: GQuark describing the semantic of the Roi (f.i. a face, a pedestrian)
+ * @id: identifier of this particular ROI
+ * @parent_id: identifier of its parent ROI, used f.i. for ROI hierarchisation.
+ * @x: x component of upper-left corner
+ * @y: y component of upper-left corner
+ * @w: bounding box width
+ * @h: bounding box height
+ * @params: list of #GstStructure containing element-specific params for downstream,
+ * see gst_video_region_of_interest_meta_add_param(). (Since: 1.14)
+ *
+ * Extra buffer metadata describing an image region of interest
+ */
+typedef struct {
+ GstMeta meta;
+
+ GQuark roi_type;
+ gint id;
+ gint parent_id;
+
+ guint x;
+ guint y;
+ guint w;
+ guint h;
+
+ GList *params;
+} GstVideoRegionOfInterestMeta;
+
+GST_VIDEO_API
+GType gst_video_region_of_interest_meta_api_get_type (void);
+#define GST_VIDEO_REGION_OF_INTEREST_META_API_TYPE (gst_video_region_of_interest_meta_api_get_type())
+GST_VIDEO_API
+const GstMetaInfo *gst_video_region_of_interest_meta_get_info (void);
+#define GST_VIDEO_REGION_OF_INTEREST_META_INFO (gst_video_region_of_interest_meta_get_info())
+
+#define gst_buffer_get_video_region_of_interest_meta(b) \
+ ((GstVideoRegionOfInterestMeta*)gst_buffer_get_meta((b),GST_VIDEO_REGION_OF_INTEREST_META_API_TYPE))
+GST_VIDEO_API
+GstVideoRegionOfInterestMeta *gst_buffer_get_video_region_of_interest_meta_id (GstBuffer * buffer,
+ gint id);
+
+GST_VIDEO_API
+GstVideoRegionOfInterestMeta *gst_buffer_add_video_region_of_interest_meta (GstBuffer * buffer,
+ const gchar * roi_type,
+ guint x,
+ guint y,
+ guint w,
+ guint h);
+
+GST_VIDEO_API
+GstVideoRegionOfInterestMeta *gst_buffer_add_video_region_of_interest_meta_id (GstBuffer * buffer,
+ GQuark roi_type,
+ guint x,
+ guint y,
+ guint w,
+ guint h);
+GST_VIDEO_API
+void gst_video_region_of_interest_meta_add_param (GstVideoRegionOfInterestMeta * meta,
+ GstStructure * s);
+
+GST_VIDEO_API
+GstStructure *gst_video_region_of_interest_meta_get_param (GstVideoRegionOfInterestMeta * meta,
+ const gchar * name);
+
+/**
+ * GstVideoTimeCodeMeta:
+ * @meta: parent #GstMeta
+ * @tc: the GstVideoTimeCode to attach
+ *
+ * Extra buffer metadata describing the GstVideoTimeCode of the frame.
+ *
+ * Each frame is assumed to have its own timecode, i.e. they are not
+ * automatically incremented/interpolated.
+ *
+ * Since: 1.10
+ */
+typedef struct {
+ GstMeta meta;
+
+ GstVideoTimeCode tc;
+} GstVideoTimeCodeMeta;
+
+GST_VIDEO_API
+GType gst_video_time_code_meta_api_get_type (void);
+#define GST_VIDEO_TIME_CODE_META_API_TYPE (gst_video_time_code_meta_api_get_type())
+
+GST_VIDEO_API
+const GstMetaInfo *gst_video_time_code_meta_get_info (void);
+#define GST_VIDEO_TIME_CODE_META_INFO (gst_video_time_code_meta_get_info())
+
+#define gst_buffer_get_video_time_code_meta(b) \
+ ((GstVideoTimeCodeMeta*)gst_buffer_get_meta((b),GST_VIDEO_TIME_CODE_META_API_TYPE))
+
+GST_VIDEO_API
+GstVideoTimeCodeMeta *gst_buffer_add_video_time_code_meta (GstBuffer * buffer,
+ const GstVideoTimeCode* tc);
+
+GST_VIDEO_API
+GstVideoTimeCodeMeta *
+gst_buffer_add_video_time_code_meta_full (GstBuffer * buffer,
+ guint fps_n,
+ guint fps_d,
+ GDateTime * latest_daily_jam,
+ GstVideoTimeCodeFlags flags,
+ guint hours,
+ guint minutes,
+ guint seconds,
+ guint frames,
+ guint field_count);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_META_H__ */
diff --git a/include/gst/video/gstvideopool.h b/include/gst/video/gstvideopool.h
new file mode 100644
index 0000000000..6fd0db1bcd
--- /dev/null
+++ b/include/gst/video/gstvideopool.h
@@ -0,0 +1,88 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_POOL_H__
+#define __GST_VIDEO_POOL_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GST_BUFFER_POOL_OPTION_VIDEO_META:
+ *
+ * An option that can be activated on a bufferpool to request video metadata
+ * on buffers from the pool.
+ */
+#define GST_BUFFER_POOL_OPTION_VIDEO_META "GstBufferPoolOptionVideoMeta"
+
+/**
+ * GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT:
+ *
+ * A bufferpool option to enable extra padding. When a bufferpool supports this
+ * option, gst_buffer_pool_config_set_video_alignment() can be called.
+ *
+ * When this option is enabled on the bufferpool,
+ * #GST_BUFFER_POOL_OPTION_VIDEO_META should also be enabled.
+ */
+#define GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT "GstBufferPoolOptionVideoAlignment"
+
+/* setting a bufferpool config */
+
+GST_VIDEO_API
+void gst_buffer_pool_config_set_video_alignment (GstStructure *config, const GstVideoAlignment *align);
+
+GST_VIDEO_API
+gboolean gst_buffer_pool_config_get_video_alignment (GstStructure *config, GstVideoAlignment *align);
+
+/* video bufferpool */
+typedef struct _GstVideoBufferPool GstVideoBufferPool;
+typedef struct _GstVideoBufferPoolClass GstVideoBufferPoolClass;
+typedef struct _GstVideoBufferPoolPrivate GstVideoBufferPoolPrivate;
+
+#define GST_TYPE_VIDEO_BUFFER_POOL (gst_video_buffer_pool_get_type())
+#define GST_IS_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_BUFFER_POOL))
+#define GST_VIDEO_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_BUFFER_POOL, GstVideoBufferPool))
+#define GST_VIDEO_BUFFER_POOL_CAST(obj) ((GstVideoBufferPool*)(obj))
+
+struct _GstVideoBufferPool
+{
+ GstBufferPool bufferpool;
+
+ GstVideoBufferPoolPrivate *priv;
+};
+
+struct _GstVideoBufferPoolClass
+{
+ GstBufferPoolClass parent_class;
+};
+
+GST_VIDEO_API
+GType gst_video_buffer_pool_get_type (void);
+
+GST_VIDEO_API
+GstBufferPool * gst_video_buffer_pool_new (void);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoBufferPool, gst_object_unref)
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_POOL_H__ */
diff --git a/include/gst/video/gstvideosink.h b/include/gst/video/gstvideosink.h
new file mode 100644
index 0000000000..a7a226dde2
--- /dev/null
+++ b/include/gst/video/gstvideosink.h
@@ -0,0 +1,153 @@
+/* GStreamer video sink base class
+ * Copyright (C) <2003> Julien Moutte <julien@moutte.net>
+ * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* FIXME 0.11: turn this into a proper base class */
+
+#ifndef __GST_VIDEO_SINK_H__
+#define __GST_VIDEO_SINK_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesink.h>
+#include <gst/video/video-prelude.h>
+#include <gst/video/video-info.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VIDEO_SINK (gst_video_sink_get_type())
+#define GST_VIDEO_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_SINK, GstVideoSink))
+#define GST_VIDEO_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VIDEO_SINK, GstVideoSinkClass))
+#define GST_IS_VIDEO_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_SINK))
+#define GST_IS_VIDEO_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VIDEO_SINK))
+#define GST_VIDEO_SINK_GET_CLASS(klass) \
+ (G_TYPE_INSTANCE_GET_CLASS ((klass), GST_TYPE_VIDEO_SINK, GstVideoSinkClass))
+
+/**
+ * GST_VIDEO_SINK_CAST:
+ * @obj: a #GstVideoSink or derived object
+ *
+ * Cast @obj to a #GstVideoSink without runtime type check.
+ */
+#define GST_VIDEO_SINK_CAST(obj) ((GstVideoSink *) (obj))
+
+/**
+ * GST_VIDEO_SINK_PAD:
+ * @obj: a #GstVideoSink
+ *
+ * Get the sink #GstPad of @obj.
+ */
+#define GST_VIDEO_SINK_PAD(obj) GST_BASE_SINK_PAD(obj)
+
+#define GST_VIDEO_SINK_WIDTH(obj) (GST_VIDEO_SINK_CAST (obj)->width)
+#define GST_VIDEO_SINK_HEIGHT(obj) (GST_VIDEO_SINK_CAST (obj)->height)
+
+typedef struct _GstVideoSink GstVideoSink;
+typedef struct _GstVideoSinkClass GstVideoSinkClass;
+typedef struct _GstVideoRectangle GstVideoRectangle;
+typedef struct _GstVideoSinkPrivate GstVideoSinkPrivate;
+
+/**
+ * GstVideoRectangle:
+ * @x: X coordinate of rectangle's top-left point
+ * @y: Y coordinate of rectangle's top-left point
+ * @w: width of the rectangle
+ * @h: height of the rectangle
+ *
+ * Helper structure representing a rectangular area.
+ */
+struct _GstVideoRectangle {
+ gint x;
+ gint y;
+ gint w;
+ gint h;
+};
+
+/**
+ * GstVideoSink:
+ * @height: video height (derived class needs to set this)
+ * @width: video width (derived class needs to set this)
+ *
+ * The video sink instance structure. Derived video sinks should set the
+ * @height and @width members.
+ */
+struct _GstVideoSink {
+ GstBaseSink element; /* FIXME 0.11: this should not be called 'element' */
+
+ /*< public >*/
+ gint width, height;
+
+ /*< private >*/
+ GstVideoSinkPrivate *priv;
+
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**
+ * GstVideoSinkClass:
+ * @parent_class: the parent class structure
+ * @show_frame: render a video frame. Maps to #GstBaseSinkClass.render() and
+ * #GstBaseSinkClass.preroll() vfuncs. Rendering during preroll will be
+ * suppressed if the #GstVideoSink:show-preroll-frame property is set to
+ * %FALSE.
+ *
+ * The video sink class structure. Derived classes should override the
+ * @show_frame virtual function.
+ */
+struct _GstVideoSinkClass {
+ GstBaseSinkClass parent_class;
+
+ GstFlowReturn (*show_frame) (GstVideoSink *video_sink, GstBuffer *buf);
+
+ /**
+ * GstVideoSinkClass::set_info:
+ * @caps: A #GstCaps.
+ * @info: A #GstVideoInfo corresponding to @caps.
+ *
+ * Notifies the subclass of changed #GstVideoInfo.
+ *
+ * Since: 1.20
+ */
+ gboolean (*set_info) (GstVideoSink *video_sink, GstCaps *caps, const GstVideoInfo *info);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING-1];
+};
+
+GST_VIDEO_API
+GType gst_video_sink_get_type (void);
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_center_rect)
+void gst_video_sink_center_rect (GstVideoRectangle src, GstVideoRectangle dst,
+ GstVideoRectangle *result, gboolean scaling);
+
+GST_VIDEO_API
+void gst_video_center_rect (const GstVideoRectangle * src,
+ const GstVideoRectangle * dst,
+ GstVideoRectangle * result,
+ gboolean scaling);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoSink, gst_object_unref)
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_SINK_H__ */
diff --git a/include/gst/video/gstvideotimecode.h b/include/gst/video/gstvideotimecode.h
new file mode 100644
index 0000000000..d35269687a
--- /dev/null
+++ b/include/gst/video/gstvideotimecode.h
@@ -0,0 +1,257 @@
+/* GStreamer
+ * Copyright (C) <2016> Vivia Nikolaidou <vivia@toolsonair.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_TIME_CODE_H__
+#define __GST_VIDEO_TIME_CODE_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoTimeCodeConfig GstVideoTimeCodeConfig;
+typedef struct _GstVideoTimeCode GstVideoTimeCode;
+typedef struct _GstVideoTimeCodeInterval GstVideoTimeCodeInterval;
+
+/**
+ * GstVideoTimeCodeFlags:
+ * @GST_VIDEO_TIME_CODE_FLAGS_NONE: No flags
+ * @GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME: Whether we have drop frame rate
+ * @GST_VIDEO_TIME_CODE_FLAGS_INTERLACED: Whether we have interlaced video
+ *
+ * Flags related to the time code information.
+ * For drop frame, only 30000/1001 and 60000/1001 frame rates are supported.
+ *
+ * Since: 1.10
+ */
+typedef enum
+{
+ GST_VIDEO_TIME_CODE_FLAGS_NONE = 0,
+ GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME = (1<<0),
+ GST_VIDEO_TIME_CODE_FLAGS_INTERLACED = (1<<1)
+ /* Not supported yet:
+ * GST_VIDEO_TIME_CODE_ALLOW_MORE_THAN_24H = (1<<2)
+ * GST_VIDEO_TIME_CODE_ALLOW_NEGATIVE = (1<<3)
+ */
+} GstVideoTimeCodeFlags;
+
+/**
+ * GstVideoTimeCodeConfig:
+ * @fps_n: Numerator of the frame rate
+ * @fps_d: Denominator of the frame rate
+ * @flags: the corresponding #GstVideoTimeCodeFlags
+ * @latest_daily_jam: The latest daily jam information, if present, or NULL
+ *
+ * Supported frame rates: 30000/1001, 60000/1001 (both with and without drop
+ * frame), and integer frame rates e.g. 25/1, 30/1, 50/1, 60/1.
+ *
+ * The configuration of the time code.
+ *
+ * Since: 1.10
+ */
+struct _GstVideoTimeCodeConfig {
+ guint fps_n;
+ guint fps_d;
+ GstVideoTimeCodeFlags flags;
+ GDateTime *latest_daily_jam;
+};
+
+/**
+ * GstVideoTimeCode:
+ * @hours: the hours field of #GstVideoTimeCode
+ * @minutes: the minutes field of #GstVideoTimeCode
+ * @seconds: the seconds field of #GstVideoTimeCode
+ * @frames: the frames field of #GstVideoTimeCode
+ * @field_count: Interlaced video field count
+ * @config: the corresponding #GstVideoTimeCodeConfig
+ *
+ * @field_count must be 0 for progressive video and 1 or 2 for interlaced.
+ *
+ * A representation of a SMPTE time code.
+ *
+ * @hours must be positive and less than 24. Will wrap around otherwise.
+ * @minutes and @seconds must be positive and less than 60.
+ * @frames must be less than or equal to @config.fps_n / @config.fps_d
+ * These values are *NOT* automatically normalized.
+ *
+ * Since: 1.10
+ */
+struct _GstVideoTimeCode {
+ GstVideoTimeCodeConfig config;
+
+ guint hours;
+ guint minutes;
+ guint seconds;
+ guint frames;
+ guint field_count;
+};
+
+/**
+ * GstVideoTimeCodeInterval:
+ * @hours: the hours field of #GstVideoTimeCodeInterval
+ * @minutes: the minutes field of #GstVideoTimeCodeInterval
+ * @seconds: the seconds field of #GstVideoTimeCodeInterval
+ * @frames: the frames field of #GstVideoTimeCodeInterval
+ *
+ * A representation of a difference between two #GstVideoTimeCode instances.
+ * Will not necessarily correspond to a real timecode (e.g. 00:00:10;00)
+ *
+ * Since: 1.12
+ */
+struct _GstVideoTimeCodeInterval {
+ guint hours;
+ guint minutes;
+ guint seconds;
+ guint frames;
+};
+
+#define GST_VIDEO_TIME_CODE_INIT { {0, 0, 0, NULL}, 0, 0, 0, 0, 0 }
+
+#define GST_TYPE_VIDEO_TIME_CODE (gst_video_time_code_get_type())
+GST_VIDEO_API
+GType gst_video_time_code_get_type (void);
+
+GST_VIDEO_API
+GstVideoTimeCode * gst_video_time_code_new (guint fps_n,
+ guint fps_d,
+ GDateTime * latest_daily_jam,
+ GstVideoTimeCodeFlags flags,
+ guint hours,
+ guint minutes,
+ guint seconds,
+ guint frames,
+ guint field_count);
+
+GST_VIDEO_API
+GstVideoTimeCode * gst_video_time_code_new_empty (void);
+
+GST_VIDEO_API
+GstVideoTimeCode * gst_video_time_code_new_from_string (const gchar * tc_str);
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_time_code_new_from_date_time_full)
+GstVideoTimeCode * gst_video_time_code_new_from_date_time (guint fps_n,
+ guint fps_d,
+ GDateTime * dt,
+ GstVideoTimeCodeFlags flags,
+ guint field_count);
+
+GST_VIDEO_API
+GstVideoTimeCode * gst_video_time_code_new_from_date_time_full (guint fps_n,
+ guint fps_d,
+ GDateTime * dt,
+ GstVideoTimeCodeFlags flags,
+ guint field_count);
+
+GST_VIDEO_API
+void gst_video_time_code_free (GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+GstVideoTimeCode * gst_video_time_code_copy (const GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+void gst_video_time_code_init (GstVideoTimeCode * tc,
+ guint fps_n,
+ guint fps_d,
+ GDateTime * latest_daily_jam,
+ GstVideoTimeCodeFlags flags,
+ guint hours,
+ guint minutes,
+ guint seconds,
+ guint frames,
+ guint field_count);
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_time_code_init_from_date_time_full)
+void gst_video_time_code_init_from_date_time (GstVideoTimeCode * tc,
+ guint fps_n,
+ guint fps_d,
+ GDateTime * dt,
+ GstVideoTimeCodeFlags flags,
+ guint field_count);
+GST_VIDEO_API
+gboolean gst_video_time_code_init_from_date_time_full (GstVideoTimeCode * tc,
+ guint fps_n,
+ guint fps_d,
+ GDateTime * dt,
+ GstVideoTimeCodeFlags flags,
+ guint field_count);
+
+GST_VIDEO_API
+void gst_video_time_code_clear (GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+gboolean gst_video_time_code_is_valid (const GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+gint gst_video_time_code_compare (const GstVideoTimeCode * tc1,
+ const GstVideoTimeCode * tc2);
+
+GST_VIDEO_API
+void gst_video_time_code_increment_frame (GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+void gst_video_time_code_add_frames (GstVideoTimeCode * tc,
+ gint64 frames);
+
+GST_VIDEO_API
+gchar *gst_video_time_code_to_string (const GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+GDateTime *gst_video_time_code_to_date_time (const GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+guint64 gst_video_time_code_nsec_since_daily_jam (const GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+guint64 gst_video_time_code_frames_since_daily_jam (const GstVideoTimeCode * tc);
+
+GST_VIDEO_API
+GstVideoTimeCode * gst_video_time_code_add_interval (const GstVideoTimeCode * tc, const GstVideoTimeCodeInterval * tc_inter);
+
+#define GST_TYPE_VIDEO_TIME_CODE_INTERVAL (gst_video_time_code_interval_get_type())
+GST_VIDEO_API
+GType gst_video_time_code_interval_get_type (void);
+
+GST_VIDEO_API
+GstVideoTimeCodeInterval * gst_video_time_code_interval_new (guint hours,
+ guint minutes,
+ guint seconds,
+ guint frames);
+
+GST_VIDEO_API
+GstVideoTimeCodeInterval * gst_video_time_code_interval_new_from_string (const gchar * tc_inter_str);
+
+GST_VIDEO_API
+void gst_video_time_code_interval_free (GstVideoTimeCodeInterval * tc);
+
+GST_VIDEO_API
+GstVideoTimeCodeInterval * gst_video_time_code_interval_copy (const GstVideoTimeCodeInterval * tc);
+
+GST_VIDEO_API
+void gst_video_time_code_interval_init (GstVideoTimeCodeInterval * tc,
+ guint hours,
+ guint minutes,
+ guint seconds,
+ guint frames);
+
+GST_VIDEO_API
+void gst_video_time_code_interval_clear (GstVideoTimeCodeInterval * tc);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_TIME_CODE_H__ */
diff --git a/include/gst/video/gstvideoutils.h b/include/gst/video/gstvideoutils.h
new file mode 100644
index 0000000000..cbe19f0e61
--- /dev/null
+++ b/include/gst/video/gstvideoutils.h
@@ -0,0 +1,341 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ * Copyright (C) 2012 Collabora Ltd.
+ * Author : Edward Hervey <edward@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_H__
+#include <gst/video/video.h>
+#endif
+
+#ifndef _GST_VIDEO_UTILS_H_
+#define _GST_VIDEO_UTILS_H_
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+#include <gst/video/video-hdr.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_VIDEO_CODEC_STATE \
+ (gst_video_codec_state_get_type())
+
+#define GST_TYPE_VIDEO_CODEC_FRAME \
+ (gst_video_codec_frame_get_type())
+
+typedef struct _GstVideoCodecState GstVideoCodecState;
+typedef struct _GstVideoCodecFrame GstVideoCodecFrame;
+
+/**
+ * GstVideoCodecState:
+ * @info: The #GstVideoInfo describing the stream
+ * @caps: The #GstCaps used in the caps negotiation of the pad.
+ * @codec_data: a #GstBuffer corresponding to the
+ * 'codec_data' field of a stream, or NULL.
+ * @allocation_caps: The #GstCaps for allocation query and pool
+ * negotiation. Since: 1.10
+ * @mastering_display_info: Mastering display color volume information
+ * (HDR metadata) for the stream. Since: 1.20
+ * @content_light_level: Content light level information for the stream.
+ * Since: 1.20
+ *
+ * Structure representing the state of an incoming or outgoing video
+ * stream for encoders and decoders.
+ *
+ * Decoders and encoders will receive such a state through their
+ * respective @set_format vmethods.
+ *
+ * Decoders and encoders can set the downstream state, by using the
+ * gst_video_decoder_set_output_state() or
+ * gst_video_encoder_set_output_state() methods.
+ */
+/**
+ * GstVideoCodecState.mastering_display_info:
+ *
+ * Mastering display color volume information (HDR metadata) for the stream.
+ *
+ * Since: 1.20
+ */
+/**
+ * GstVideoCodecState.content_light_level:
+ *
+ * Content light level information for the stream.
+ *
+ * Since: 1.20
+ */
+struct _GstVideoCodecState
+{
+ /*< private >*/
+ gint ref_count;
+
+ /*< public >*/
+ GstVideoInfo info;
+
+ GstCaps *caps;
+
+ GstBuffer *codec_data;
+
+ GstCaps *allocation_caps;
+
+ GstVideoMasteringDisplayInfo *mastering_display_info;
+ GstVideoContentLightLevel *content_light_level;
+
+ /*< private >*/
+ gpointer padding[GST_PADDING_LARGE - 3];
+};
+
+/**
+ * GstVideoCodecFrameFlags:
+ * @GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY: is the frame only meant to be decoded
+ * @GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT: is the frame a synchronization point (keyframe)
+ * @GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME: should the output frame be made a keyframe
+ * @GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS: should the encoder output stream headers
+ * @GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED: the buffer data is corrupted (Since: 1.20)
+ *
+ * Flags for #GstVideoCodecFrame
+ */
+typedef enum
+{
+ GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY = (1<<0),
+ GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT = (1<<1),
+ GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME = (1<<2),
+ GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS = (1<<3),
+ /**
+ * GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED:
+ *
+ * The buffer data is corrupted.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED = (1<<4),
+} GstVideoCodecFrameFlags;
+
+/**
+ * GST_VIDEO_CODEC_FRAME_FLAGS:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * The entire set of flags for the @frame
+ */
+#define GST_VIDEO_CODEC_FRAME_FLAGS(frame) ((frame)->flags)
+
+/**
+ * GST_VIDEO_CODEC_FRAME_FLAG_IS_SET:
+ * @frame: a #GstVideoCodecFrame
+ * @flag: a flag to check for
+ *
+ * Checks whether the given @flag is set
+ */
+#define GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame,flag) !!(GST_VIDEO_CODEC_FRAME_FLAGS(frame) & (flag))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_FLAG_SET:
+ * @frame: a #GstVideoCodecFrame
+ * @flag: Flag to set, can be any number of bits in guint32.
+ *
+ * This macro sets the given bits
+ */
+#define GST_VIDEO_CODEC_FRAME_FLAG_SET(frame,flag) (GST_VIDEO_CODEC_FRAME_FLAGS(frame) |= (flag))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_FLAG_UNSET:
+ * @frame: a #GstVideoCodecFrame
+ * @flag: Flag to unset
+ *
+ * This macro unsets the given bits.
+ */
+#define GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame,flag) (GST_VIDEO_CODEC_FRAME_FLAGS(frame) &= ~(flag))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * Tests if the buffer should only be decoded but not sent downstream.
+ */
+#define GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * Sets the buffer to not be sent downstream.
+ *
+ * Decoder implementation can use this if they have frames that
+ * are not meant to be displayed.
+ *
+ * Encoder implementation can safely ignore this field.
+ */
+#define GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * Tests if the frame is a synchronization point (like a keyframe).
+ *
+ * Decoder implementations can use this to detect keyframes.
+ */
+#define GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * Sets the frame to be a synchronization point (like a keyframe).
+ *
+ * Encoder implementations should set this accordingly.
+ *
+ * Decoder implementing parsing features should set this when they
+ * detect such a synchronization point.
+ */
+#define GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT))
+#define GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT(frame) (GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame, GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT))
+
+
+/**
+ * GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * Tests if the frame must be encoded as a keyframe. Applies only to
+ * frames provided to encoders. Decoders can safely ignore this field.
+ */
+#define GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME))
+#define GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME))
+#define GST_VIDEO_CODEC_FRAME_UNSET_FORCE_KEYFRAME(frame) (GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME))
+
+/**
+ * GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS:
+ * @frame: a #GstVideoCodecFrame
+ *
+ * Tests if encoder should output stream headers before outputting the
+ * resulting encoded buffer for the given frame.
+ *
+ * Applies only to frames provided to encoders. Decoders can safely
+ * ignore this field.
+ */
+#define GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS))
+#define GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS))
+#define GST_VIDEO_CODEC_FRAME_UNSET_FORCE_KEYFRAME_HEADERS(frame) (GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS))
+
+/**
+ * GstVideoCodecFrame:
+ * @pts: Presentation timestamp
+ * @dts: Decoding timestamp
+ * @duration: Duration of the frame
+ * @system_frame_number: Unique identifier for the frame. Use this if you need
+ * to get hold of the frame later (like when data is being decoded).
+ * Typical usage in decoders is to set this on the opaque value provided
+ * to the library and get back the frame using gst_video_decoder_get_frame()
+ * @distance_from_sync: Distance in frames from the last synchronization point.
+ * @input_buffer: the input #GstBuffer that created this frame. The buffer is owned
+ * by the frame and references to the frame instead of the buffer should
+ * be kept.
+ * @output_buffer: the output #GstBuffer. Implementations should set this either
+ * directly, or by using the
+ * gst_video_decoder_allocate_output_frame() or
+ * gst_video_decoder_allocate_output_buffer() methods. The buffer is
+ * owned by the frame and references to the frame instead of the
+ * buffer should be kept.
+ * @deadline: Running time when the frame will be used.
+ *
+ * A #GstVideoCodecFrame represents a video frame both in raw and
+ * encoded form.
+ */
+struct _GstVideoCodecFrame
+{
+ /*< private >*/
+ gint ref_count;
+ guint32 flags;
+
+ /*< public >*/
+ guint32 system_frame_number; /* ED */
+
+ /*< private >*/
+ guint32 decode_frame_number; /* ED */
+ guint32 presentation_frame_number; /* ED */
+
+ /*< public >*/
+ GstClockTime dts; /* ED */
+ GstClockTime pts; /* ED */
+ GstClockTime duration; /* ED */
+
+ int distance_from_sync; /* ED */
+
+ GstBuffer *input_buffer; /* ED */
+ GstBuffer *output_buffer; /* ED */
+
+ GstClockTime deadline; /* D */
+
+ /*< private >*/
+
+ /* Events that should be pushed downstream *before*
+ * the next output_buffer */
+ /* FIXME 2.0: Use a GQueue or similar */
+ GList *events; /* ED */
+
+ gpointer user_data;
+ GDestroyNotify user_data_destroy_notify;
+
+ union {
+ struct {
+ /*< private >*/
+ GstClockTime ts;
+ GstClockTime ts2;
+ guint num_subframes;
+ guint subframes_processed;
+ } ABI;
+ gpointer padding[GST_PADDING_LARGE];
+ } abidata;
+};
+
+/* GstVideoCodecState */
+
+GST_VIDEO_API
+GType gst_video_codec_state_get_type (void);
+
+GST_VIDEO_API
+GstVideoCodecState *gst_video_codec_state_ref (GstVideoCodecState * state);
+
+GST_VIDEO_API
+void gst_video_codec_state_unref (GstVideoCodecState * state);
+
+
+/* GstVideoCodecFrame */
+
+GST_VIDEO_API
+GType gst_video_codec_frame_get_type (void);
+
+GST_VIDEO_API
+GstVideoCodecFrame *gst_video_codec_frame_ref (GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+void gst_video_codec_frame_unref (GstVideoCodecFrame * frame);
+
+GST_VIDEO_API
+void gst_video_codec_frame_set_user_data (GstVideoCodecFrame *frame,
+ gpointer user_data,
+ GDestroyNotify notify);
+
+GST_VIDEO_API
+gpointer gst_video_codec_frame_get_user_data (GstVideoCodecFrame *frame);
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoCodecFrame, gst_video_codec_frame_unref)
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoCodecState, gst_video_codec_state_unref)
+
+G_END_DECLS
+
+#endif
diff --git a/include/gst/video/navigation.h b/include/gst/video/navigation.h
new file mode 100644
index 0000000000..6bd61a5991
--- /dev/null
+++ b/include/gst/video/navigation.h
@@ -0,0 +1,338 @@
+/* GStreamer Navigation
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2003 David A. Schleef <ds@schleef.org>
+ *
+ * navigation.h: navigation interface design
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_NAVIGATION_H__
+#define __GST_NAVIGATION_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_NAVIGATION \
+ (gst_navigation_get_type ())
+#define GST_NAVIGATION(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_NAVIGATION, GstNavigation))
+#define GST_IS_NAVIGATION(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_NAVIGATION))
+#define GST_NAVIGATION_GET_INTERFACE(obj) \
+ (G_TYPE_INSTANCE_GET_INTERFACE ((obj), GST_TYPE_NAVIGATION, GstNavigationInterface))
+
+typedef struct _GstNavigation GstNavigation;
+typedef struct _GstNavigationInterface GstNavigationInterface;
+
+/**
+ * GstNavigationInterface:
+ * @iface: the parent interface
+ * @send_event: sending a navigation event
+ *
+ * Navigation interface.
+ */
+struct _GstNavigationInterface {
+ GTypeInterface iface;
+
+ /* virtual functions */
+ void (*send_event) (GstNavigation *navigation, GstStructure *structure);
+};
+
+GST_VIDEO_API
+GType gst_navigation_get_type (void);
+
+/* Navigation commands */
+
+/**
+ * GstNavigationCommand:
+ * @GST_NAVIGATION_COMMAND_INVALID: An invalid command entry
+ * @GST_NAVIGATION_COMMAND_MENU1: Execute navigation menu command 1. For DVD,
+ * this enters the DVD root menu, or exits back to the title from the menu.
+ * @GST_NAVIGATION_COMMAND_MENU2: Execute navigation menu command 2. For DVD,
+ * this jumps to the DVD title menu.
+ * @GST_NAVIGATION_COMMAND_MENU3: Execute navigation menu command 3. For DVD,
+ * this jumps into the DVD root menu.
+ * @GST_NAVIGATION_COMMAND_MENU4: Execute navigation menu command 4. For DVD,
+ * this jumps to the Subpicture menu.
+ * @GST_NAVIGATION_COMMAND_MENU5: Execute navigation menu command 5. For DVD,
+ * this jumps to the audio menu.
+ * @GST_NAVIGATION_COMMAND_MENU6: Execute navigation menu command 6. For DVD,
+ * this jumps to the angles menu.
+ * @GST_NAVIGATION_COMMAND_MENU7: Execute navigation menu command 7. For DVD,
+ * this jumps to the chapter menu.
+ * @GST_NAVIGATION_COMMAND_LEFT: Select the next button to the left in a menu,
+ * if such a button exists.
+ * @GST_NAVIGATION_COMMAND_RIGHT: Select the next button to the right in a menu,
+ * if such a button exists.
+ * @GST_NAVIGATION_COMMAND_UP: Select the button above the current one in a
+ * menu, if such a button exists.
+ * @GST_NAVIGATION_COMMAND_DOWN: Select the button below the current one in a
+ * menu, if such a button exists.
+ * @GST_NAVIGATION_COMMAND_ACTIVATE: Activate (click) the currently selected
+ * button in a menu, if such a button exists.
+ * @GST_NAVIGATION_COMMAND_PREV_ANGLE: Switch to the previous angle in a
+ * multiangle feature.
+ * @GST_NAVIGATION_COMMAND_NEXT_ANGLE: Switch to the next angle in a multiangle
+ * feature.
+ *
+ * A set of commands that may be issued to an element providing the
+ * #GstNavigation interface. The available commands can be queried via
+ * the gst_navigation_query_new_commands() query.
+ *
+ * For convenience in handling DVD navigation, the MENU commands are aliased as:
+ * GST_NAVIGATION_COMMAND_DVD_MENU = @GST_NAVIGATION_COMMAND_MENU1
+ * GST_NAVIGATION_COMMAND_DVD_TITLE_MENU = @GST_NAVIGATION_COMMAND_MENU2
+ * GST_NAVIGATION_COMMAND_DVD_ROOT_MENU = @GST_NAVIGATION_COMMAND_MENU3
+ * GST_NAVIGATION_COMMAND_DVD_SUBPICTURE_MENU = @GST_NAVIGATION_COMMAND_MENU4
+ * GST_NAVIGATION_COMMAND_DVD_AUDIO_MENU = @GST_NAVIGATION_COMMAND_MENU5
+ * GST_NAVIGATION_COMMAND_DVD_ANGLE_MENU = @GST_NAVIGATION_COMMAND_MENU6
+ * GST_NAVIGATION_COMMAND_DVD_CHAPTER_MENU = @GST_NAVIGATION_COMMAND_MENU7
+ */
+typedef enum {
+ GST_NAVIGATION_COMMAND_INVALID = 0,
+
+ GST_NAVIGATION_COMMAND_MENU1 = 1,
+ GST_NAVIGATION_COMMAND_MENU2 = 2,
+ GST_NAVIGATION_COMMAND_MENU3 = 3,
+ GST_NAVIGATION_COMMAND_MENU4 = 4,
+ GST_NAVIGATION_COMMAND_MENU5 = 5,
+ GST_NAVIGATION_COMMAND_MENU6 = 6,
+ GST_NAVIGATION_COMMAND_MENU7 = 7,
+
+ GST_NAVIGATION_COMMAND_LEFT = 20,
+ GST_NAVIGATION_COMMAND_RIGHT = 21,
+ GST_NAVIGATION_COMMAND_UP = 22,
+ GST_NAVIGATION_COMMAND_DOWN = 23,
+ GST_NAVIGATION_COMMAND_ACTIVATE = 24,
+
+ GST_NAVIGATION_COMMAND_PREV_ANGLE = 30,
+ GST_NAVIGATION_COMMAND_NEXT_ANGLE = 31
+} GstNavigationCommand;
+
+/* Some aliases for the menu command types */
+#define GST_NAVIGATION_COMMAND_DVD_MENU GST_NAVIGATION_COMMAND_MENU1
+#define GST_NAVIGATION_COMMAND_DVD_TITLE_MENU GST_NAVIGATION_COMMAND_MENU2
+#define GST_NAVIGATION_COMMAND_DVD_ROOT_MENU GST_NAVIGATION_COMMAND_MENU3
+#define GST_NAVIGATION_COMMAND_DVD_SUBPICTURE_MENU GST_NAVIGATION_COMMAND_MENU4
+#define GST_NAVIGATION_COMMAND_DVD_AUDIO_MENU GST_NAVIGATION_COMMAND_MENU5
+#define GST_NAVIGATION_COMMAND_DVD_ANGLE_MENU GST_NAVIGATION_COMMAND_MENU6
+#define GST_NAVIGATION_COMMAND_DVD_CHAPTER_MENU GST_NAVIGATION_COMMAND_MENU7
+
+/* Queries */
+/**
+ * GstNavigationQueryType:
+ * @GST_NAVIGATION_QUERY_INVALID: invalid query
+ * @GST_NAVIGATION_QUERY_COMMANDS: command query
+ * @GST_NAVIGATION_QUERY_ANGLES: viewing angle query
+ *
+ * Types of navigation interface queries.
+ */
+typedef enum
+{
+ GST_NAVIGATION_QUERY_INVALID = 0,
+ GST_NAVIGATION_QUERY_COMMANDS = 1,
+ GST_NAVIGATION_QUERY_ANGLES = 2
+} GstNavigationQueryType;
+
+GST_VIDEO_API
+GstNavigationQueryType gst_navigation_query_get_type (GstQuery *query);
+
+GST_VIDEO_API
+GstQuery * gst_navigation_query_new_commands (void);
+
+GST_VIDEO_API
+void gst_navigation_query_set_commands (GstQuery *query, gint n_cmds, ...);
+
+GST_VIDEO_API
+void gst_navigation_query_set_commandsv (GstQuery *query, gint n_cmds,
+ GstNavigationCommand *cmds);
+
+GST_VIDEO_API
+gboolean gst_navigation_query_parse_commands_length (GstQuery *query,
+ guint *n_cmds);
+
+GST_VIDEO_API
+gboolean gst_navigation_query_parse_commands_nth (GstQuery *query, guint nth,
+ GstNavigationCommand *cmd);
+
+GST_VIDEO_API
+GstQuery * gst_navigation_query_new_angles (void);
+
+GST_VIDEO_API
+void gst_navigation_query_set_angles (GstQuery *query, guint cur_angle,
+ guint n_angles);
+
+GST_VIDEO_API
+gboolean gst_navigation_query_parse_angles (GstQuery *query, guint *cur_angle,
+ guint *n_angles);
+
+/* Element messages */
+/**
+ * GstNavigationMessageType:
+ * @GST_NAVIGATION_MESSAGE_INVALID: Returned from
+ * gst_navigation_message_get_type() when the passed message is not a
+ * navigation message.
+ * @GST_NAVIGATION_MESSAGE_MOUSE_OVER: Sent when the mouse moves over or leaves a
+ * clickable region of the output, such as a DVD menu button.
+ * @GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED: Sent when the set of available commands
+ * changes and should be re-queried by interested applications.
+ * @GST_NAVIGATION_MESSAGE_ANGLES_CHANGED: Sent when display angles in a multi-angle
+ * feature (such as a multiangle DVD) change - either angles have appeared or
+ * disappeared.
+ * @GST_NAVIGATION_MESSAGE_EVENT: Sent when a navigation event was not handled
+ * by any element in the pipeline (Since: 1.6)
+ *
+ * A set of notifications that may be received on the bus when navigation
+ * related status changes.
+ */
+typedef enum {
+ GST_NAVIGATION_MESSAGE_INVALID,
+ GST_NAVIGATION_MESSAGE_MOUSE_OVER,
+ GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED,
+ GST_NAVIGATION_MESSAGE_ANGLES_CHANGED,
+ GST_NAVIGATION_MESSAGE_EVENT
+} GstNavigationMessageType;
+
+GST_VIDEO_API
+GstNavigationMessageType gst_navigation_message_get_type (GstMessage *message);
+
+GST_VIDEO_API
+GstMessage * gst_navigation_message_new_mouse_over (GstObject *src,
+ gboolean active);
+
+GST_VIDEO_API
+gboolean gst_navigation_message_parse_mouse_over (GstMessage *message,
+ gboolean *active);
+
+GST_VIDEO_API
+GstMessage * gst_navigation_message_new_commands_changed (GstObject *src);
+
+GST_VIDEO_API
+GstMessage * gst_navigation_message_new_angles_changed (GstObject *src,
+ guint cur_angle,
+ guint n_angles);
+
+GST_VIDEO_API
+gboolean gst_navigation_message_parse_angles_changed (GstMessage *message,
+ guint *cur_angle,
+ guint *n_angles);
+
+GST_VIDEO_API
+GstMessage * gst_navigation_message_new_event (GstObject *src,
+ GstEvent *event);
+
+GST_VIDEO_API
+gboolean gst_navigation_message_parse_event (GstMessage *message,
+ GstEvent ** event);
+/* event parsing functions */
+/**
+ * GstNavigationEventType:
+ * @GST_NAVIGATION_EVENT_INVALID: Returned from
+ * gst_navigation_event_get_type() when the passed event is not a navigation event.
+ * @GST_NAVIGATION_EVENT_KEY_PRESS: A key press event. Use
+ * gst_navigation_event_parse_key_event() to extract the details from the event.
+ * @GST_NAVIGATION_EVENT_KEY_RELEASE: A key release event. Use
+ * gst_navigation_event_parse_key_event() to extract the details from the event.
+ * @GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS: A mouse button press event. Use
+ * gst_navigation_event_parse_mouse_button_event() to extract the details from the
+ * event.
+ * @GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE: A mouse button release event. Use
+ * gst_navigation_event_parse_mouse_button_event() to extract the details from the
+ * event.
+ * @GST_NAVIGATION_EVENT_MOUSE_MOVE: A mouse movement event. Use
+ * gst_navigation_event_parse_mouse_move_event() to extract the details from the
+ * event.
+ * @GST_NAVIGATION_EVENT_COMMAND: A navigation command event. Use
+ * gst_navigation_event_parse_command() to extract the details from the event.
+ * @GST_NAVIGATION_EVENT_MOUSE_SCROLL: A mouse scroll event. Use
+ * gst_navigation_event_parse_mouse_scroll_event() to extract the details from
+ * the event. (Since: 1.18)
+ *
+ * Enum values for the various events that an element implementing the
+ * GstNavigation interface might send up the pipeline.
+ */
+typedef enum {
+ GST_NAVIGATION_EVENT_INVALID = 0,
+ GST_NAVIGATION_EVENT_KEY_PRESS = 1,
+ GST_NAVIGATION_EVENT_KEY_RELEASE = 2,
+ GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS = 3,
+ GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE = 4,
+ GST_NAVIGATION_EVENT_MOUSE_MOVE = 5,
+ GST_NAVIGATION_EVENT_COMMAND = 6,
+
+ /**
+ * GST_NAVIGATION_EVENT_MOUSE_SCROLL:
+ *
+ * A mouse scroll event. Use gst_navigation_event_parse_mouse_scroll_event()
+ * to extract the details from the event.
+ *
+ * Since: 1.18
+ */
+ GST_NAVIGATION_EVENT_MOUSE_SCROLL = 7
+} GstNavigationEventType;
+
+GST_VIDEO_API
+GstNavigationEventType gst_navigation_event_get_type (GstEvent *event);
+
+GST_VIDEO_API
+gboolean gst_navigation_event_parse_key_event (GstEvent *event,
+ const gchar **key);
+
+GST_VIDEO_API
+gboolean gst_navigation_event_parse_mouse_button_event (GstEvent *event,
+ gint *button, gdouble *x, gdouble *y);
+
+GST_VIDEO_API
+gboolean gst_navigation_event_parse_mouse_move_event (GstEvent *event,
+ gdouble *x, gdouble *y);
+
+GST_VIDEO_API
+gboolean gst_navigation_event_parse_mouse_scroll_event (GstEvent *event,
+ gdouble *x, gdouble *y,
+ gdouble *delta_x, gdouble *delta_y);
+
+GST_VIDEO_API
+gboolean gst_navigation_event_parse_command (GstEvent *event,
+ GstNavigationCommand *command);
+
+/* interface virtual function wrappers */
+
+GST_VIDEO_API
+void gst_navigation_send_event (GstNavigation *navigation,
+ GstStructure *structure);
+
+GST_VIDEO_API
+void gst_navigation_send_key_event (GstNavigation *navigation,
+ const char *event, const char *key);
+
+GST_VIDEO_API
+void gst_navigation_send_mouse_event (GstNavigation *navigation,
+ const char *event, int button, double x, double y);
+
+GST_VIDEO_API
+void gst_navigation_send_mouse_scroll_event (GstNavigation *navigation,
+ double x, double y, double delta_x, double delta_y);
+
+GST_VIDEO_API
+void gst_navigation_send_command (GstNavigation *navigation,
+ GstNavigationCommand command);
+
+G_END_DECLS
+
+#endif /* __GST_NAVIGATION_H__ */
diff --git a/include/gst/video/video-anc.h b/include/gst/video/video-anc.h
new file mode 100644
index 0000000000..a28d0f2bf2
--- /dev/null
+++ b/include/gst/video/video-anc.h
@@ -0,0 +1,504 @@
+/* GStreamer
+ * Copyright (C) <2018> Edward Hervey <edward@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_ANC_H__
+#define __GST_VIDEO_ANC_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-format.h>
+#include <gst/video/video-info.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoAncillary GstVideoAncillary;
+
+/**
+ * GstVideoAncillary:
+ * @DID: The Data Identifier
+ * @SDID_block_number: The Secondary Data Identifier (if type 2) or the Data
+ * Block Number (if type 1)
+ * @data_count: The amount of data (in bytes) in @data (max 255 bytes)
+ * @data: (array length=data_count): The user data content of the Ancillary packet.
+ * Does not contain the ADF, DID, SDID nor CS.
+ *
+ * Video Ancillary data, according to SMPTE-291M specification.
+ *
+ * Note that the contents of the data are always stored as 8bit data (i.e. do not contain
+ * the parity check bits).
+ *
+ * Since: 1.16
+ */
+struct _GstVideoAncillary {
+ guint8 DID;
+ guint8 SDID_block_number;
+ guint8 data_count;
+ guint8 data[256];
+
+ /*< private >*/
+ /* Padding for future extension */
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/**
+ * GstVideoAncillaryDID:
+ *
+ * Since: 1.16
+ */
+typedef enum {
+ GST_VIDEO_ANCILLARY_DID_UNDEFINED = 0x00,
+ GST_VIDEO_ANCILLARY_DID_DELETION = 0x80,
+ GST_VIDEO_ANCILLARY_DID_HANC_3G_AUDIO_DATA_FIRST = 0xa0,
+ GST_VIDEO_ANCILLARY_DID_HANC_3G_AUDIO_DATA_LAST = 0xa7,
+ GST_VIDEO_ANCILLARY_DID_HANC_HDTV_AUDIO_DATA_FIRST = 0xe0,
+ GST_VIDEO_ANCILLARY_DID_HANC_HDTV_AUDIO_DATA_LAST = 0xe7,
+ GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_1_FIRST = 0xec,
+ GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_1_LAST = 0xef,
+ GST_VIDEO_ANCILLARY_DID_CAMERA_POSITION = 0xf0,
+ GST_VIDEO_ANCILLARY_DID_HANC_ERROR_DETECTION = 0xf4,
+ GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_2_FIRST = 0xf8,
+ GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_2_LAST = 0xff,
+} GstVideoAncillaryDID;
+
+/**
+ * GST_VIDEO_ANCILLARY_DID16:
+ * @anc: a #GstVideoAncillary
+ *
+ * Returns the #GstVideoAncillaryDID16 of the ancillary data.
+ *
+ * Since: 1.16
+ *
+ * Returns: a #GstVideoAncillaryDID16 identifier
+ */
+#define GST_VIDEO_ANCILLARY_DID16(anc) ((guint16)((anc)->DID) << 8 | (guint16)((anc)->SDID_block_number))
+
+/**
+ * GstVideoAncillaryDID16:
+ * @GST_VIDEO_ANCILLARY_DID16_S334_EIA_708: CEA 708 Ancillary data according to SMPTE 334
+ * @GST_VIDEO_ANCILLARY_DID16_S334_EIA_608: CEA 608 Ancillary data according to SMPTE 334
+ * @GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR: AFD/Bar Ancillary data according to SMPTE 2016-3 (Since: 1.18)
+ *
+ * Some known types of Ancillary Data identifiers.
+ *
+ * Since: 1.16
+ */
+typedef enum {
+ GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 = 0x6101,
+ GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 = 0x6102,
+ GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR = 0x4105,
+} GstVideoAncillaryDID16;
+
+/**
+ * GstVideoAFDValue:
+ * @GST_VIDEO_AFD_UNAVAILABLE: Unavailable (see note 0 below).
+ * @GST_VIDEO_AFD_16_9_TOP_ALIGNED: For 4:3 coded frame, letterbox 16:9 image,
+ * at top of the coded frame. For 16:9 coded frame, full frame 16:9 image,
+ * the same as the coded frame.
+ * @GST_VIDEO_AFD_14_9_TOP_ALIGNED: For 4:3 coded frame, letterbox 14:9 image,
+ * at top of the coded frame. For 16:9 coded frame, pillarbox 14:9 image,
+ * horizontally centered in the coded frame.
+ * @GST_VIDEO_AFD_GREATER_THAN_16_9: For 4:3 coded frame, letterbox image with an aspect ratio
+ * greater than 16:9, vertically centered in the coded frame. For 16:9 coded frame,
+ * letterbox image with an aspect ratio greater than 16:9.
+ * @GST_VIDEO_AFD_4_3_FULL_16_9_FULL: For 4:3 coded frame, full frame 4:3 image,
+ * the same as the coded frame. For 16:9 coded frame, full frame 16:9 image, the same as
+ * the coded frame.
+ * @GST_VIDEO_AFD_4_3_FULL_4_3_PILLAR: For 4:3 coded frame, full frame 4:3 image, the same as
+ * the coded frame. For 16:9 coded frame, pillarbox 4:3 image, horizontally centered in the
+ * coded frame.
+ * @GST_VIDEO_AFD_16_9_LETTER_16_9_FULL: For 4:3 coded frame, letterbox 16:9 image, vertically centered in
+ * the coded frame with all image areas protected. For 16:9 coded frame, full frame 16:9 image,
+ * with all image areas protected.
+ * @GST_VIDEO_AFD_14_9_LETTER_14_9_PILLAR: For 4:3 coded frame, letterbox 14:9 image, vertically centered in
+ * the coded frame. For 16:9 coded frame, pillarbox 14:9 image, horizontally centered in the
+ * coded frame.
+ * @GST_VIDEO_AFD_4_3_FULL_14_9_CENTER: For 4:3 coded frame, full frame 4:3 image, with alternative 14:9
+ * center. For 16:9 coded frame, pillarbox 4:3 image, with alternative 14:9 center.
+ * @GST_VIDEO_AFD_16_9_LETTER_14_9_CENTER: For 4:3 coded frame, letterbox 16:9 image, with alternative 14:9
+ * center. For 16:9 coded frame, full frame 16:9 image, with alternative 14:9 center.
+ * @GST_VIDEO_AFD_16_9_LETTER_4_3_CENTER: For 4:3 coded frame, letterbox 16:9 image, with alternative 4:3
+ * center. For 16:9 coded frame, full frame 16:9 image, with alternative 4:3 center.
+ *
+ * Enumeration of the various values for Active Format Description (AFD)
+ *
+ * AFD should be included in video user data whenever the rectangular
+ * picture area containing useful information does not extend to the full height or width of the coded
+ * frame. AFD data may also be included in user data when the rectangular picture area containing
+ * useful information extends to the full height and width of the coded frame.
+ *
+ * For details, see Table 6.14 Active Format in:
+ *
+ * ATSC Digital Television Standard:
+ * Part 4 – MPEG-2 Video System Characteristics
+ *
+ * https://www.atsc.org/wp-content/uploads/2015/03/a_53-Part-4-2009.pdf
+ *
+ * and Active Format Description in Complete list of AFD codes
+ *
+ * https://en.wikipedia.org/wiki/Active_Format_Description#Complete_list_of_AFD_codes
+ *
+ * and SMPTE ST2016-1
+ *
+ * Notes:
+ *
+ * 1) AFD 0 is undefined for ATSC and SMPTE ST2016-1, indicating that AFD data is not available:
+ * If Bar Data is not present, AFD '0000' indicates that exact information
+ * is not available and the active image should be assumed to be the same as the coded frame. AFD '0000'.
+ * AFD '0000' accompanied by Bar Data signals that the active image’s aspect ratio is narrower than 16:9,
+ * but is not 4:3 or 14:9. As the exact aspect ratio cannot be conveyed by AFD alone, wherever possible,
+ * AFD ‘0000’ should be accompanied by Bar Data to define the exact vertical or horizontal extent
+ * of the active image.
+ * 2) AFD 0 is reserved for DVB/ETSI
+ * 3) values 1, 5, 6, 7, and 12 are reserved for both ATSC and DVB/ETSI
+ * 4) values 2 and 3 are not recommended for ATSC, but are valid for DVB/ETSI
+ *
+ * Since: 1.18
+ */
+typedef enum {
+ GST_VIDEO_AFD_UNAVAILABLE = 0,
+ GST_VIDEO_AFD_16_9_TOP_ALIGNED = 2,
+ GST_VIDEO_AFD_14_9_TOP_ALIGNED = 3,
+ GST_VIDEO_AFD_GREATER_THAN_16_9 = 4,
+ GST_VIDEO_AFD_4_3_FULL_16_9_FULL = 8,
+ GST_VIDEO_AFD_4_3_FULL_4_3_PILLAR = 9,
+ GST_VIDEO_AFD_16_9_LETTER_16_9_FULL = 10,
+ GST_VIDEO_AFD_14_9_LETTER_14_9_PILLAR = 11,
+ GST_VIDEO_AFD_4_3_FULL_14_9_CENTER = 13,
+ GST_VIDEO_AFD_16_9_LETTER_14_9_CENTER = 14,
+ GST_VIDEO_AFD_16_9_LETTER_4_3_CENTER = 15
+} GstVideoAFDValue;
+
+/**
+ * GstVideoAFDSpec:
+ * @GST_VIDEO_AFD_SPEC_DVB_ETSI: AFD value is from DVB/ETSI standard
+ * @GST_VIDEO_AFD_SPEC_ATSC_A53: AFD value is from ATSC A/53 standard
+ * @GST_VIDEO_AFD_SPEC_SMPTE_ST2016_1: AFD value is from SMPTE ST2016-1 standard
+ *
+ * Enumeration of the different standards that may apply to AFD data:
+ *
+ * 0) ETSI/DVB:
+ * https://www.etsi.org/deliver/etsi_ts/101100_101199/101154/02.01.01_60/ts_101154v020101p.pdf
+ *
+ * 1) ATSC A/53:
+ * https://www.atsc.org/wp-content/uploads/2015/03/a_53-Part-4-2009.pdf
+ *
+ * 2) SMPTE ST2016-1:
+ *
+ * Since: 1.18
+ */
+typedef enum {
+ GST_VIDEO_AFD_SPEC_DVB_ETSI,
+ GST_VIDEO_AFD_SPEC_ATSC_A53,
+ GST_VIDEO_AFD_SPEC_SMPTE_ST2016_1
+} GstVideoAFDSpec;
+
+/**
+ * GstVideoAFDMeta:
+ * @meta: parent #GstMeta
+ * @field: 0 for progressive or field 1 and 1 for field 2
+ * @spec: #GstVideoAFDSpec that applies to @afd
+ * @afd: #GstVideoAFDValue AFD value
+ *
+ * Active Format Description (AFD)
+ *
+ * For details, see Table 6.14 Active Format in:
+ *
+ * ATSC Digital Television Standard:
+ * Part 4 – MPEG-2 Video System Characteristics
+ *
+ * https://www.atsc.org/wp-content/uploads/2015/03/a_53-Part-4-2009.pdf
+ *
+ * and Active Format Description in Complete list of AFD codes
+ *
+ * https://en.wikipedia.org/wiki/Active_Format_Description#Complete_list_of_AFD_codes
+ *
+ * and SMPTE ST2016-1
+ *
+ * Since: 1.18
+ */
+typedef struct {
+ GstMeta meta;
+
+ guint8 field;
+ GstVideoAFDSpec spec;
+ GstVideoAFDValue afd;
+} GstVideoAFDMeta;
+
+GST_VIDEO_API GType gst_video_afd_meta_api_get_type (void);
+#define GST_VIDEO_AFD_META_API_TYPE (gst_video_afd_meta_api_get_type())
+
+GST_VIDEO_API const GstMetaInfo *gst_video_afd_meta_get_info (void);
+#define GST_VIDEO_AFD_META_INFO (gst_video_afd_meta_get_info())
+
+/**
+ * gst_buffer_get_video_afd_meta:
+ * @b: A #GstBuffer
+ *
+ * Gets the #GstVideoAFDMeta that might be present on @b.
+ *
+ * Note: there may be two #GstVideoAFDMeta structs for interlaced video.
+ *
+ * Since: 1.18
+ *
+ * Returns: The first #GstVideoAFDMeta present on @b, or %NULL if
+ * no #GstVideoAFDMeta are present
+ */
+#define gst_buffer_get_video_afd_meta(b) \
+ ((GstVideoAFDMeta*)gst_buffer_get_meta((b),GST_VIDEO_AFD_META_API_TYPE))
+
+GST_VIDEO_API
+GstVideoAFDMeta *gst_buffer_add_video_afd_meta (GstBuffer * buffer, guint8 field,
+ GstVideoAFDSpec spec,
+ GstVideoAFDValue afd);
+/**
+ * GstVideoBarMeta:
+ * @meta: parent #GstMeta
+ * @field: 0 for progressive or field 1 and 1 for field 2
+ * @is_letterbox: if true then bar data specifies letterbox, otherwise pillarbox
+ * @bar_data1: If @is_letterbox is true, then the value specifies the
+ * last line of a horizontal letterbox bar area at top of reconstructed frame.
+ * Otherwise, it specifies the last horizontal luminance sample of a vertical pillarbox
+ * bar area at the left side of the reconstructed frame
+ * @bar_data2: If @is_letterbox is true, then the value specifies the
+ * first line of a horizontal letterbox bar area at bottom of reconstructed frame.
+ * Otherwise, it specifies the first horizontal
+ * luminance sample of a vertical pillarbox bar area at the right side of the reconstructed frame.
+ *
+ * Bar data should be included in video user data
+ * whenever the rectangular picture area containing useful information
+ * does not extend to the full height or width of the coded frame
+ * and AFD alone is insufficient to describe the extent of the image.
+ *
+ * Note: either vertical or horizontal bars are specified, but not both.
+ *
+ * For more details, see:
+ *
+ * https://www.atsc.org/wp-content/uploads/2015/03/a_53-Part-4-2009.pdf
+ *
+ * and SMPTE ST2016-1
+ *
+ * Since: 1.18
+ */
+typedef struct {
+ GstMeta meta;
+
+ guint8 field;
+ gboolean is_letterbox;
+ guint bar_data1;
+ guint bar_data2;
+} GstVideoBarMeta;
+
+GST_VIDEO_API GType gst_video_bar_meta_api_get_type (void);
+#define GST_VIDEO_BAR_META_API_TYPE (gst_video_bar_meta_api_get_type())
+
+GST_VIDEO_API const GstMetaInfo *gst_video_bar_meta_get_info (void);
+#define GST_VIDEO_BAR_META_INFO (gst_video_bar_meta_get_info())
+/**
+ * gst_buffer_get_video_bar_meta:
+ * @b: A #GstBuffer
+ *
+ * Gets the #GstVideoBarMeta that might be present on @b.
+ *
+ * Since: 1.18
+ *
+ * Returns: The first #GstVideoBarMeta present on @b, or %NULL if
+ * no #GstVideoBarMeta are present
+ */
+#define gst_buffer_get_video_bar_meta(b) \
+ ((GstVideoBarMeta*)gst_buffer_get_meta((b),GST_VIDEO_BAR_META_API_TYPE))
+
+GST_VIDEO_API
+GstVideoBarMeta *gst_buffer_add_video_bar_meta (GstBuffer * buffer, guint8 field,
+ gboolean is_letterbox, guint bar_data1, guint bar_data2);
+
+/* Closed Caption support */
+/**
+ * GstVideoCaptionType:
+ * @GST_VIDEO_CAPTION_TYPE_UNKNOWN: Unknown type of CC
+ * @GST_VIDEO_CAPTION_TYPE_CEA608_RAW: CEA-608 as byte pairs. Note that
+ *     this format is not recommended since it does not specify to
+ * which field the caption comes from and therefore assumes
+ * it comes from the first field (and that there is no information
+ * on the second field). Use @GST_VIDEO_CAPTION_TYPE_CEA708_RAW
+ * if you wish to store CEA-608 from two fields and prefix each byte pair
+ * with 0xFC for the first field and 0xFD for the second field.
+ * @GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A: CEA-608 as byte triplets as defined
+ * in SMPTE S334-1 Annex A. The second and third byte of the byte triplet
+ * is the raw CEA608 data, the first byte is a bitfield: The top/7th bit is
+ * 0 for the second field, 1 for the first field, bit 6 and 5 are 0 and
+ * bits 4 to 0 are a 5 bit unsigned integer that represents the line
+ * offset relative to the base-line of the original image format (line 9
+ * for 525-line field 1, line 272 for 525-line field 2, line 5 for
+ * 625-line field 1 and line 318 for 625-line field 2).
+ * @GST_VIDEO_CAPTION_TYPE_CEA708_RAW: CEA-708 as cc_data byte triplets. They
+ * can also contain 608-in-708 and the first byte of each triplet has to
+ * be inspected for detecting the type.
+ * @GST_VIDEO_CAPTION_TYPE_CEA708_CDP: CEA-708 (and optionally CEA-608) in
+ * a CDP (Caption Distribution Packet) defined by SMPTE S-334-2.
+ * Contains the whole CDP (starting with 0x9669).
+ *
+ * The various known types of Closed Caption (CC).
+ *
+ * Since: 1.16
+ */
+typedef enum {
+ GST_VIDEO_CAPTION_TYPE_UNKNOWN = 0,
+ GST_VIDEO_CAPTION_TYPE_CEA608_RAW = 1,
+ GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A = 2,
+ GST_VIDEO_CAPTION_TYPE_CEA708_RAW = 3,
+ GST_VIDEO_CAPTION_TYPE_CEA708_CDP = 4
+} GstVideoCaptionType;
+
+GST_VIDEO_API
+GstVideoCaptionType
+gst_video_caption_type_from_caps (const GstCaps *caps);
+
+GST_VIDEO_API
+GstCaps *
+gst_video_caption_type_to_caps (GstVideoCaptionType type);
+
+/**
+ * GstVideoCaptionMeta:
+ * @meta: parent #GstMeta
+ * @caption_type: The type of Closed Caption contained in the meta.
+ * @data: (array length=size): The Closed Caption data.
+ * @size: The size in bytes of @data
+ *
+ * Extra buffer metadata providing Closed Caption.
+ *
+ * Since: 1.16
+ */
+typedef struct {
+ GstMeta meta;
+
+ GstVideoCaptionType caption_type;
+ guint8 *data;
+ gsize size;
+} GstVideoCaptionMeta;
+
+GST_VIDEO_API
+GType gst_video_caption_meta_api_get_type (void);
+#define GST_VIDEO_CAPTION_META_API_TYPE (gst_video_caption_meta_api_get_type())
+
+GST_VIDEO_API
+const GstMetaInfo *gst_video_caption_meta_get_info (void);
+#define GST_VIDEO_CAPTION_META_INFO (gst_video_caption_meta_get_info())
+
+/**
+ * gst_buffer_get_video_caption_meta:
+ * @b: A #GstBuffer
+ *
+ * Gets the #GstVideoCaptionMeta that might be present on @b.
+ *
+ * Since: 1.16
+ *
+ * Returns: The first #GstVideoCaptionMeta present on @b, or %NULL if
+ * no #GstVideoCaptionMeta are present
+ */
+#define gst_buffer_get_video_caption_meta(b) \
+ ((GstVideoCaptionMeta*)gst_buffer_get_meta((b),GST_VIDEO_CAPTION_META_API_TYPE))
+
+GST_VIDEO_API
+GstVideoCaptionMeta *gst_buffer_add_video_caption_meta (GstBuffer * buffer,
+ GstVideoCaptionType caption_type,
+ const guint8 *data,
+ gsize size);
+
+/**
+ * GstVideoVBIParser:
+ *
+ * A parser for detecting and extracting @GstVideoAncillary data from
+ * Vertical Blanking Interval lines of component signals.
+ *
+ * Since: 1.16
+ */
+
+typedef struct _GstVideoVBIParser GstVideoVBIParser;
+
+GST_VIDEO_API
+GType gst_video_vbi_parser_get_type (void);
+
+/**
+ * GstVideoVBIParserResult:
+ * @GST_VIDEO_VBI_PARSER_RESULT_DONE: No lines were provided, or no more Ancillary data was found.
+ * @GST_VIDEO_VBI_PARSER_RESULT_OK: A #GstVideoAncillary was found.
+ * @GST_VIDEO_VBI_PARSER_RESULT_ERROR: An error occurred
+ *
+ * Return values for #GstVideoVBIParser
+ *
+ * Since: 1.16
+ */
+typedef enum {
+ GST_VIDEO_VBI_PARSER_RESULT_DONE = 0,
+ GST_VIDEO_VBI_PARSER_RESULT_OK = 1,
+ GST_VIDEO_VBI_PARSER_RESULT_ERROR = 2
+} GstVideoVBIParserResult;
+
+GST_VIDEO_API
+GstVideoVBIParserResult gst_video_vbi_parser_get_ancillary(GstVideoVBIParser *parser,
+ GstVideoAncillary *anc);
+
+GST_VIDEO_API
+GstVideoVBIParser *gst_video_vbi_parser_new (GstVideoFormat format, guint32 pixel_width);
+
+GST_VIDEO_API
+GstVideoVBIParser *gst_video_vbi_parser_copy (const GstVideoVBIParser *parser);
+
+GST_VIDEO_API
+void gst_video_vbi_parser_free (GstVideoVBIParser *parser);
+
+GST_VIDEO_API
+void gst_video_vbi_parser_add_line (GstVideoVBIParser *parser, const guint8 *data);
+
+/**
+ * GstVideoVBIEncoder:
+ *
+ * An encoder for writing ancillary data to the
+ * Vertical Blanking Interval lines of component signals.
+ *
+ * Since: 1.16
+ */
+
+typedef struct _GstVideoVBIEncoder GstVideoVBIEncoder;
+
+GST_VIDEO_API
+GType gst_video_vbi_encoder_get_type (void);
+
+GST_VIDEO_API
+GstVideoVBIEncoder *gst_video_vbi_encoder_new (GstVideoFormat format, guint32 pixel_width);
+
+GST_VIDEO_API
+GstVideoVBIEncoder *gst_video_vbi_encoder_copy (const GstVideoVBIEncoder *encoder);
+
+GST_VIDEO_API
+void gst_video_vbi_encoder_free (GstVideoVBIEncoder *encoder);
+
+GST_VIDEO_API
+gboolean gst_video_vbi_encoder_add_ancillary (GstVideoVBIEncoder *encoder,
+ gboolean composite,
+ guint8 DID,
+ guint8 SDID_block_number,
+ const guint8 *data,
+ guint data_count);
+
+GST_VIDEO_API
+void gst_video_vbi_encoder_write_line (GstVideoVBIEncoder *encoder, guint8 *data);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_ANC_H__ */
diff --git a/include/gst/video/video-blend.h b/include/gst/video/video-blend.h
new file mode 100644
index 0000000000..1d6175b793
--- /dev/null
+++ b/include/gst/video/video-blend.h
@@ -0,0 +1,41 @@
+/* Gstreamer video blending utility functions
+ *
+ * Copyright (C) <2011> Intel Corporation
+ * Copyright (C) <2011> Collabora Ltd.
+ * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_VIDEO_BLEND__
+#define __GST_VIDEO_BLEND__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+GST_VIDEO_API
+void gst_video_blend_scale_linear_RGBA (GstVideoInfo * src, GstBuffer * src_buffer,
+ gint dest_height, gint dest_width,
+ GstVideoInfo * dest, GstBuffer ** dest_buffer);
+
+GST_VIDEO_API
+gboolean gst_video_blend (GstVideoFrame * dest,
+ GstVideoFrame * src,
+ gint x, gint y,
+ gfloat global_alpha);
+
+#endif
diff --git a/include/gst/video/video-chroma.h b/include/gst/video/video-chroma.h
new file mode 100644
index 0000000000..16720b4b52
--- /dev/null
+++ b/include/gst/video/video-chroma.h
@@ -0,0 +1,118 @@
+/* GStreamer
+ * Copyright (C) <2013> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_CHROMA_H__
+#define __GST_VIDEO_CHROMA_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoChromaSite:
+ * @GST_VIDEO_CHROMA_SITE_UNKNOWN: unknown cositing
+ * @GST_VIDEO_CHROMA_SITE_NONE: no cositing
+ * @GST_VIDEO_CHROMA_SITE_H_COSITED: chroma is horizontally cosited
+ * @GST_VIDEO_CHROMA_SITE_V_COSITED: chroma is vertically cosited
+ * @GST_VIDEO_CHROMA_SITE_ALT_LINE: chroma samples are sited on alternate lines
+ * @GST_VIDEO_CHROMA_SITE_COSITED: chroma samples cosited with luma samples
+ * @GST_VIDEO_CHROMA_SITE_JPEG: jpeg style cositing, also for mpeg1 and mjpeg
+ * @GST_VIDEO_CHROMA_SITE_MPEG2: mpeg2 style cositing
+ * @GST_VIDEO_CHROMA_SITE_DV: DV style cositing
+ *
+ * Various Chroma sitings.
+ */
+typedef enum {
+ GST_VIDEO_CHROMA_SITE_UNKNOWN = 0,
+ GST_VIDEO_CHROMA_SITE_NONE = (1 << 0),
+ GST_VIDEO_CHROMA_SITE_H_COSITED = (1 << 1),
+ GST_VIDEO_CHROMA_SITE_V_COSITED = (1 << 2),
+ GST_VIDEO_CHROMA_SITE_ALT_LINE = (1 << 3),
+ /* some common chroma cositing */
+ GST_VIDEO_CHROMA_SITE_COSITED = (GST_VIDEO_CHROMA_SITE_H_COSITED | GST_VIDEO_CHROMA_SITE_V_COSITED),
+ GST_VIDEO_CHROMA_SITE_JPEG = (GST_VIDEO_CHROMA_SITE_NONE),
+ GST_VIDEO_CHROMA_SITE_MPEG2 = (GST_VIDEO_CHROMA_SITE_H_COSITED),
+ GST_VIDEO_CHROMA_SITE_DV = (GST_VIDEO_CHROMA_SITE_COSITED | GST_VIDEO_CHROMA_SITE_ALT_LINE),
+} GstVideoChromaSite;
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_chroma_site_from_string)
+GstVideoChromaSite gst_video_chroma_from_string (const gchar * s);
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_chroma_site_to_string)
+const gchar * gst_video_chroma_to_string (GstVideoChromaSite site);
+
+GST_VIDEO_API
+GstVideoChromaSite gst_video_chroma_site_from_string (const gchar * s);
+
+GST_VIDEO_API
+gchar * gst_video_chroma_site_to_string (GstVideoChromaSite site);
+
+/**
+ * GstVideoChromaMethod:
+ * @GST_VIDEO_CHROMA_METHOD_NEAREST: Duplicates the chroma samples when
+ * upsampling and drops when subsampling
+ * @GST_VIDEO_CHROMA_METHOD_LINEAR: Uses linear interpolation to reconstruct
+ * missing chroma and averaging to subsample
+ *
+ * Different subsampling and upsampling methods
+ */
+typedef enum {
+ GST_VIDEO_CHROMA_METHOD_NEAREST,
+ GST_VIDEO_CHROMA_METHOD_LINEAR
+} GstVideoChromaMethod;
+
+/**
+ * GstVideoChromaFlags:
+ * @GST_VIDEO_CHROMA_FLAG_NONE: no flags
+ * @GST_VIDEO_CHROMA_FLAG_INTERLACED: the input is interlaced
+ *
+ * Extra flags that influence the result from gst_video_chroma_resample_new().
+ */
+typedef enum {
+ GST_VIDEO_CHROMA_FLAG_NONE = 0,
+ GST_VIDEO_CHROMA_FLAG_INTERLACED = (1 << 0),
+} GstVideoChromaFlags;
+
+typedef struct _GstVideoChromaResample GstVideoChromaResample;
+
+/* circular dependency, need to include this after defining the enums */
+#include <gst/video/video-format.h>
+
+GST_VIDEO_API
+GstVideoChromaResample * gst_video_chroma_resample_new (GstVideoChromaMethod method,
+ GstVideoChromaSite site,
+ GstVideoChromaFlags flags,
+ GstVideoFormat format,
+ gint h_factor, gint v_factor);
+
+GST_VIDEO_API
+void gst_video_chroma_resample_free (GstVideoChromaResample *resample);
+
+GST_VIDEO_API
+void gst_video_chroma_resample_get_info (GstVideoChromaResample *resample,
+ guint * n_lines, gint *offset);
+
+GST_VIDEO_API
+void gst_video_chroma_resample (GstVideoChromaResample *resample,
+ gpointer lines[], gint width);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_CHROMA_H__ */
diff --git a/include/gst/video/video-color.h b/include/gst/video/video-color.h
new file mode 100644
index 0000000000..c6ff3c8d85
--- /dev/null
+++ b/include/gst/video/video-color.h
@@ -0,0 +1,304 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_COLOR_H__
+#define __GST_VIDEO_COLOR_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video-format.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoColorRange:
+ * @GST_VIDEO_COLOR_RANGE_UNKNOWN: unknown range
+ * @GST_VIDEO_COLOR_RANGE_0_255: [0..255] for 8 bit components
+ * @GST_VIDEO_COLOR_RANGE_16_235: [16..235] for 8 bit components. Chroma has
+ * [16..240] range.
+ *
+ * Possible color range values. These constants are defined for 8 bit color
+ * values and can be scaled for other bit depths.
+ */
+typedef enum {
+ GST_VIDEO_COLOR_RANGE_UNKNOWN = 0,
+ GST_VIDEO_COLOR_RANGE_0_255,
+ GST_VIDEO_COLOR_RANGE_16_235
+} GstVideoColorRange;
+
+/**
+ * GstVideoColorMatrix:
+ * @GST_VIDEO_COLOR_MATRIX_UNKNOWN: unknown matrix
+ * @GST_VIDEO_COLOR_MATRIX_RGB: identity matrix. Order of coefficients is
+ * actually GBR, also IEC 61966-2-1 (sRGB)
+ * @GST_VIDEO_COLOR_MATRIX_FCC: FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
+ * @GST_VIDEO_COLOR_MATRIX_BT709: ITU-R BT.709 color matrix, also ITU-R BT1361
+ * / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
+ * @GST_VIDEO_COLOR_MATRIX_BT601: ITU-R BT.601 color matrix, also SMPTE170M / ITU-R BT1358 525 / ITU-R BT1700 NTSC
+ * @GST_VIDEO_COLOR_MATRIX_SMPTE240M: SMPTE 240M color matrix
+ * @GST_VIDEO_COLOR_MATRIX_BT2020: ITU-R BT.2020 color matrix. Since: 1.6
+ *
+ * The color matrix is used to convert between Y'PbPr and
+ * non-linear RGB (R'G'B')
+ */
+typedef enum {
+ GST_VIDEO_COLOR_MATRIX_UNKNOWN = 0,
+ GST_VIDEO_COLOR_MATRIX_RGB,
+ GST_VIDEO_COLOR_MATRIX_FCC,
+ GST_VIDEO_COLOR_MATRIX_BT709,
+ GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_COLOR_MATRIX_SMPTE240M,
+ GST_VIDEO_COLOR_MATRIX_BT2020
+} GstVideoColorMatrix;
+
+GST_VIDEO_API
+gboolean gst_video_color_matrix_get_Kr_Kb (GstVideoColorMatrix matrix, gdouble * Kr, gdouble * Kb);
+
+/**
+ * GstVideoTransferFunction:
+ * @GST_VIDEO_TRANSFER_UNKNOWN: unknown transfer function
+ * @GST_VIDEO_TRANSFER_GAMMA10: linear RGB, gamma 1.0 curve
+ * @GST_VIDEO_TRANSFER_GAMMA18: Gamma 1.8 curve
+ * @GST_VIDEO_TRANSFER_GAMMA20: Gamma 2.0 curve
+ * @GST_VIDEO_TRANSFER_GAMMA22: Gamma 2.2 curve
+ * @GST_VIDEO_TRANSFER_BT709: Gamma 2.2 curve with a linear segment in the lower
+ * range, also ITU-R BT470M / ITU-R BT1700 625 PAL &
+ * SECAM / ITU-R BT1361
+ * @GST_VIDEO_TRANSFER_SMPTE240M: Gamma 2.2 curve with a linear segment in the
+ * lower range
+ * @GST_VIDEO_TRANSFER_SRGB: Gamma 2.4 curve with a linear segment in the lower
+ * range. IEC 61966-2-1 (sRGB or sYCC)
+ * @GST_VIDEO_TRANSFER_GAMMA28: Gamma 2.8 curve, also ITU-R BT470BG
+ * @GST_VIDEO_TRANSFER_LOG100: Logarithmic transfer characteristic
+ * 100:1 range
+ * @GST_VIDEO_TRANSFER_LOG316: Logarithmic transfer characteristic
+ * 316.22777:1 range (100 * sqrt(10) : 1)
+ * @GST_VIDEO_TRANSFER_BT2020_12: Gamma 2.2 curve with a linear segment in the lower
+ * range. Used for BT.2020 with 12 bits per
+ * component. Since: 1.6
+ * @GST_VIDEO_TRANSFER_ADOBERGB: Gamma 2.19921875. Since: 1.8
+ * @GST_VIDEO_TRANSFER_BT2020_10: Rec. ITU-R BT.2020-2 with 10 bits per component.
+ * (functionally the same as the values
+ * GST_VIDEO_TRANSFER_BT709 and GST_VIDEO_TRANSFER_BT601).
+ * Since: 1.18
+ * @GST_VIDEO_TRANSFER_SMPTE2084: SMPTE ST 2084 for 10, 12, 14, and 16-bit systems.
+ * Known as perceptual quantization (PQ)
+ * Since: 1.18
+ * @GST_VIDEO_TRANSFER_ARIB_STD_B67: Association of Radio Industries and Businesses (ARIB)
+ *                                  STD-B67 and Rec. ITU-R BT.2100-1 hybrid log-gamma (HLG) system
+ * Since: 1.18
+ * @GST_VIDEO_TRANSFER_BT601: also known as SMPTE170M / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
+ * Functionally the same as the values
+ * GST_VIDEO_TRANSFER_BT709, and GST_VIDEO_TRANSFER_BT2020_10.
+ * Since: 1.18
+ *
+ * The video transfer function defines the formula for converting between
+ * non-linear RGB (R'G'B') and linear RGB
+ */
+typedef enum {
+ GST_VIDEO_TRANSFER_UNKNOWN = 0,
+ GST_VIDEO_TRANSFER_GAMMA10,
+ GST_VIDEO_TRANSFER_GAMMA18,
+ GST_VIDEO_TRANSFER_GAMMA20,
+ GST_VIDEO_TRANSFER_GAMMA22,
+ GST_VIDEO_TRANSFER_BT709,
+ GST_VIDEO_TRANSFER_SMPTE240M,
+ GST_VIDEO_TRANSFER_SRGB,
+ GST_VIDEO_TRANSFER_GAMMA28,
+ GST_VIDEO_TRANSFER_LOG100,
+ GST_VIDEO_TRANSFER_LOG316,
+ GST_VIDEO_TRANSFER_BT2020_12,
+ GST_VIDEO_TRANSFER_ADOBERGB,
+ GST_VIDEO_TRANSFER_BT2020_10,
+ GST_VIDEO_TRANSFER_SMPTE2084,
+ GST_VIDEO_TRANSFER_ARIB_STD_B67,
+ /**
+ * GST_VIDEO_TRANSFER_BT601:
+ *
+ * also known as SMPTE170M / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
+ *
+ * Since: 1.18
+ */
+ GST_VIDEO_TRANSFER_BT601
+} GstVideoTransferFunction;
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_transfer_function_encode)
+gdouble gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val);
+GST_VIDEO_API
+gdouble gst_video_transfer_function_encode (GstVideoTransferFunction func, gdouble val);
+
+GST_VIDEO_DEPRECATED_FOR(gst_video_transfer_function_decode)
+gdouble gst_video_color_transfer_decode (GstVideoTransferFunction func, gdouble val);
+GST_VIDEO_API
+gdouble gst_video_transfer_function_decode (GstVideoTransferFunction func, gdouble val);
+
+/**
+ * GstVideoColorPrimaries:
+ * @GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: unknown color primaries
+ * @GST_VIDEO_COLOR_PRIMARIES_BT709: BT709 primaries, also ITU-R BT1361 / IEC
+ * 61966-2-4 / SMPTE RP177 Annex B
+ * @GST_VIDEO_COLOR_PRIMARIES_BT470M: BT470M primaries, also FCC Title 47 Code
+ * of Federal Regulations 73.682 (a)(20)
+ * @GST_VIDEO_COLOR_PRIMARIES_BT470BG: BT470BG primaries, also ITU-R BT601-6
+ * 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
+ * @GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: SMPTE170M primaries, also ITU-R
+ * BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
+ * @GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: SMPTE240M primaries
+ * @GST_VIDEO_COLOR_PRIMARIES_FILM: Generic film (colour filters using
+ * Illuminant C)
+ * @GST_VIDEO_COLOR_PRIMARIES_BT2020: ITU-R BT2020 primaries. Since: 1.6
+ * @GST_VIDEO_COLOR_PRIMARIES_ADOBERGB: Adobe RGB primaries. Since: 1.8
+ * @GST_VIDEO_COLOR_PRIMARIES_SMPTEST428: SMPTE ST 428 primaries (CIE 1931
+ * XYZ). Since: 1.16
+ * @GST_VIDEO_COLOR_PRIMARIES_SMPTERP431: SMPTE RP 431 primaries (ST 431-2
+ * (2011) / DCI P3). Since: 1.16
+ * @GST_VIDEO_COLOR_PRIMARIES_SMPTEEG432: SMPTE EG 432 primaries (ST 432-1
+ * (2010) / P3 D65). Since: 1.16
+ * @GST_VIDEO_COLOR_PRIMARIES_EBU3213: EBU 3213 primaries (JEDEC P22
+ * phosphors). Since: 1.16
+ *
+ * The color primaries define how to transform linear RGB values to and from
+ * the CIE XYZ colorspace.
+ */
+typedef enum {
+ GST_VIDEO_COLOR_PRIMARIES_UNKNOWN = 0,
+ GST_VIDEO_COLOR_PRIMARIES_BT709,
+ GST_VIDEO_COLOR_PRIMARIES_BT470M,
+ GST_VIDEO_COLOR_PRIMARIES_BT470BG,
+ GST_VIDEO_COLOR_PRIMARIES_SMPTE170M,
+ GST_VIDEO_COLOR_PRIMARIES_SMPTE240M,
+ GST_VIDEO_COLOR_PRIMARIES_FILM,
+ GST_VIDEO_COLOR_PRIMARIES_BT2020,
+ GST_VIDEO_COLOR_PRIMARIES_ADOBERGB,
+ GST_VIDEO_COLOR_PRIMARIES_SMPTEST428,
+ GST_VIDEO_COLOR_PRIMARIES_SMPTERP431,
+ GST_VIDEO_COLOR_PRIMARIES_SMPTEEG432,
+ GST_VIDEO_COLOR_PRIMARIES_EBU3213,
+} GstVideoColorPrimaries;
+
+/**
+ * GstVideoColorPrimariesInfo:
+ * @primaries: a #GstVideoColorPrimaries
+ * @Wx: reference white x coordinate
+ * @Wy: reference white y coordinate
+ * @Rx: red x coordinate
+ * @Ry: red y coordinate
+ * @Gx: green x coordinate
+ * @Gy: green y coordinate
+ * @Bx: blue x coordinate
+ * @By: blue y coordinate
+ *
+ * Structure describing the chromaticity coordinates of an RGB system. These
+ * values can be used to construct a matrix to transform RGB to and from the
+ * XYZ colorspace.
+ *
+ * Since: 1.6
+ */
+typedef struct {
+ GstVideoColorPrimaries primaries;
+ gdouble Wx, Wy;
+ gdouble Rx, Ry;
+ gdouble Gx, Gy;
+ gdouble Bx, By;
+} GstVideoColorPrimariesInfo;
+
+GST_VIDEO_API
+const GstVideoColorPrimariesInfo *
+ gst_video_color_primaries_get_info (GstVideoColorPrimaries primaries);
+
+/**
+ * GstVideoColorimetry:
+ * @range: the color range. This is the valid range for the samples.
+ * It is used to convert the samples to Y'PbPr values.
+ * @matrix: the color matrix. Used to convert between Y'PbPr and
+ * non-linear RGB (R'G'B')
+ * @transfer: the transfer function. used to convert between R'G'B' and RGB
+ * @primaries: color primaries. used to convert between R'G'B' and CIE XYZ
+ *
+ * Structure describing the color info.
+ */
+typedef struct {
+ GstVideoColorRange range;
+ GstVideoColorMatrix matrix;
+ GstVideoTransferFunction transfer;
+ GstVideoColorPrimaries primaries;
+} GstVideoColorimetry;
+
+/* predefined colorimetry */
+#define GST_VIDEO_COLORIMETRY_BT601 "bt601"
+#define GST_VIDEO_COLORIMETRY_BT709 "bt709"
+#define GST_VIDEO_COLORIMETRY_SMPTE240M "smpte240m"
+#define GST_VIDEO_COLORIMETRY_SRGB "sRGB"
+#define GST_VIDEO_COLORIMETRY_BT2020 "bt2020"
+#define GST_VIDEO_COLORIMETRY_BT2020_10 "bt2020-10"
+#define GST_VIDEO_COLORIMETRY_BT2100_PQ "bt2100-pq"
+#define GST_VIDEO_COLORIMETRY_BT2100_HLG "bt2100-hlg"
+
+GST_VIDEO_API
+gboolean gst_video_colorimetry_matches (const GstVideoColorimetry *cinfo, const gchar *color);
+
+GST_VIDEO_API
+gboolean gst_video_colorimetry_from_string (GstVideoColorimetry *cinfo, const gchar *color);
+
+GST_VIDEO_API
+gchar * gst_video_colorimetry_to_string (const GstVideoColorimetry *cinfo);
+
+GST_VIDEO_API
+gboolean gst_video_colorimetry_is_equal (const GstVideoColorimetry *cinfo, const GstVideoColorimetry *other);
+
+/* compute offset and scale */
+
+GST_VIDEO_API
+void gst_video_color_range_offsets (GstVideoColorRange range,
+ const GstVideoFormatInfo *info,
+ gint offset[GST_VIDEO_MAX_COMPONENTS],
+ gint scale[GST_VIDEO_MAX_COMPONENTS]);
+
+/* conversion between GStreamer color{matrix,transfer,primaries} enum and
+ * values defined by ISO/IEC 23001-8 and ITU-T H.273 specification.
+ * Also H264 and H265 specifications follow the color{matrix,transfer,primaries}
+ * values */
+
+GST_VIDEO_API
+guint gst_video_color_matrix_to_iso (GstVideoColorMatrix matrix);
+
+GST_VIDEO_API
+guint gst_video_transfer_function_to_iso (GstVideoTransferFunction func);
+
+GST_VIDEO_API
+guint gst_video_color_primaries_to_iso (GstVideoColorPrimaries primaries);
+
+GST_VIDEO_API
+GstVideoColorMatrix gst_video_color_matrix_from_iso (guint value);
+
+GST_VIDEO_API
+GstVideoTransferFunction gst_video_transfer_function_from_iso (guint value);
+
+GST_VIDEO_API
+GstVideoColorPrimaries gst_video_color_primaries_from_iso (guint value);
+
+GST_VIDEO_API
+gboolean gst_video_transfer_function_is_equivalent (GstVideoTransferFunction from_func,
+ guint from_bpp,
+ GstVideoTransferFunction to_func,
+ guint to_bpp);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_COLOR_H__ */
diff --git a/include/gst/video/video-converter.h b/include/gst/video/video-converter.h
new file mode 100644
index 0000000000..7175f42b12
--- /dev/null
+++ b/include/gst/video/video-converter.h
@@ -0,0 +1,318 @@
+/* Video conversion api function
+ * Copyright (C) 2014 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_CONVERTER_H__
+#define __GST_VIDEO_CONVERTER_H__
+
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD:
+ *
+ * #GstVideoResamplerMethod, The resampler method to use for
+ * resampling. Other options for the resampler can be used, see
+ * the #GstVideoResampler. Default is #GST_VIDEO_RESAMPLER_METHOD_CUBIC
+ */
+#define GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD "GstVideoConverter.resampler-method"
+/**
+ * GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD:
+ *
+ * #GstVideoChromaMethod, The resampler method to use for
+ * chroma resampling. Other options for the resampler can be used, see
+ * the #GstVideoResampler. Default is #GST_VIDEO_RESAMPLER_METHOD_LINEAR
+ */
+#define GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD "GstVideoConverter.chroma-resampler-method"
+/**
+ * GST_VIDEO_CONVERTER_OPT_RESAMPLER_TAPS:
+ *
+ * #G_TYPE_UINT, The number of taps for the resampler.
+ * Default is 0: let the resampler choose a good value.
+ */
+#define GST_VIDEO_CONVERTER_OPT_RESAMPLER_TAPS "GstVideoConverter.resampler-taps"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_DITHER_METHOD:
+ *
+ * #GstVideoDitherMethod, The dither method to use when
+ * changing bit depth.
+ * Default is #GST_VIDEO_DITHER_BAYER.
+ */
+#define GST_VIDEO_CONVERTER_OPT_DITHER_METHOD "GstVideoConverter.dither-method"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION:
+ *
+ * #G_TYPE_UINT, The quantization amount to dither to. Components will be
+ * quantized to multiples of this value.
+ * Default is 1
+ */
+#define GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION "GstVideoConverter.dither-quantization"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_SRC_X:
+ *
+ * #G_TYPE_INT, source x position to start conversion, default 0
+ */
+#define GST_VIDEO_CONVERTER_OPT_SRC_X "GstVideoConverter.src-x"
+/**
+ * GST_VIDEO_CONVERTER_OPT_SRC_Y:
+ *
+ * #G_TYPE_INT, source y position to start conversion, default 0
+ */
+#define GST_VIDEO_CONVERTER_OPT_SRC_Y "GstVideoConverter.src-y"
+/**
+ * GST_VIDEO_CONVERTER_OPT_SRC_WIDTH:
+ *
+ * #G_TYPE_INT, source width to convert, default source width
+ */
+#define GST_VIDEO_CONVERTER_OPT_SRC_WIDTH "GstVideoConverter.src-width"
+/**
+ * GST_VIDEO_CONVERTER_OPT_SRC_HEIGHT:
+ *
+ * #G_TYPE_INT, source height to convert, default source height
+ */
+#define GST_VIDEO_CONVERTER_OPT_SRC_HEIGHT "GstVideoConverter.src-height"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_DEST_X:
+ *
+ * #G_TYPE_INT, x position in the destination frame, default 0
+ */
+#define GST_VIDEO_CONVERTER_OPT_DEST_X "GstVideoConverter.dest-x"
+/**
+ * GST_VIDEO_CONVERTER_OPT_DEST_Y:
+ *
+ * #G_TYPE_INT, y position in the destination frame, default 0
+ */
+#define GST_VIDEO_CONVERTER_OPT_DEST_Y "GstVideoConverter.dest-y"
+/**
+ * GST_VIDEO_CONVERTER_OPT_DEST_WIDTH:
+ *
+ * #G_TYPE_INT, width in the destination frame, default destination width
+ */
+#define GST_VIDEO_CONVERTER_OPT_DEST_WIDTH "GstVideoConverter.dest-width"
+/**
+ * GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT:
+ *
+ * #G_TYPE_INT, height in the destination frame, default destination height
+ */
+#define GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT "GstVideoConverter.dest-height"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_FILL_BORDER:
+ *
+ * #G_TYPE_BOOLEAN, if the destination rectangle does not fill the complete
+ * destination image, render a border with
+ * #GST_VIDEO_CONVERTER_OPT_BORDER_ARGB. Otherwise the unused pixels in the
+ * destination are untouched. Default %TRUE.
+ */
+#define GST_VIDEO_CONVERTER_OPT_FILL_BORDER "GstVideoConverter.fill-border"
+/**
+ * GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE:
+ *
+ * #G_TYPE_DOUBLE, the alpha color value to use.
+ * Default to 1.0
+ */
+#define GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE "GstVideoConverter.alpha-value"
+/**
+ * GstVideoAlphaMode:
+ * @GST_VIDEO_ALPHA_MODE_COPY: When input and output have alpha, it will be copied.
+ * When the input has no alpha, alpha will be set to
+ * #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE
+ * @GST_VIDEO_ALPHA_MODE_SET: set all alpha to
+ * #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE
+ * @GST_VIDEO_ALPHA_MODE_MULT: multiply all alpha with
+ * #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE.
+ * When the input format has no alpha but the output format has, the
+ * alpha value will be set to #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE
+ *
+ * Different alpha modes.
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_ALPHA_MODE_COPY,
+ GST_VIDEO_ALPHA_MODE_SET,
+ GST_VIDEO_ALPHA_MODE_MULT
+} GstVideoAlphaMode;
+/**
+ * GST_VIDEO_CONVERTER_OPT_ALPHA_MODE:
+ *
+ * #GstVideoAlphaMode, the alpha mode to use.
+ * Default is #GST_VIDEO_ALPHA_MODE_COPY.
+ */
+#define GST_VIDEO_CONVERTER_OPT_ALPHA_MODE "GstVideoConverter.alpha-mode"
+/**
+ * GST_VIDEO_CONVERTER_OPT_BORDER_ARGB:
+ *
+ * #G_TYPE_UINT, the border color to use if #GST_VIDEO_CONVERTER_OPT_FILL_BORDER
+ * is set to %TRUE. The color is in ARGB format.
+ * Default 0xff000000
+ */
+#define GST_VIDEO_CONVERTER_OPT_BORDER_ARGB "GstVideoConverter.border-argb"
+
+/**
+ * GstVideoChromaMode:
+ * @GST_VIDEO_CHROMA_MODE_FULL: do full chroma up and down sampling
+ * @GST_VIDEO_CHROMA_MODE_UPSAMPLE_ONLY: only perform chroma upsampling
+ * @GST_VIDEO_CHROMA_MODE_DOWNSAMPLE_ONLY: only perform chroma downsampling
+ * @GST_VIDEO_CHROMA_MODE_NONE: disable chroma resampling
+ *
+ * Different chroma downsampling and upsampling modes
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_CHROMA_MODE_FULL,
+ GST_VIDEO_CHROMA_MODE_UPSAMPLE_ONLY,
+ GST_VIDEO_CHROMA_MODE_DOWNSAMPLE_ONLY,
+ GST_VIDEO_CHROMA_MODE_NONE
+} GstVideoChromaMode;
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_CHROMA_MODE:
+ *
+ * #GstVideoChromaMode, set the chroma resample mode subsampled
+ * formats. Default is #GST_VIDEO_CHROMA_MODE_FULL.
+ */
+#define GST_VIDEO_CONVERTER_OPT_CHROMA_MODE "GstVideoConverter.chroma-mode"
+
+/**
+ * GstVideoMatrixMode:
+ * @GST_VIDEO_MATRIX_MODE_FULL: do conversion between color matrices
+ * @GST_VIDEO_MATRIX_MODE_INPUT_ONLY: use the input color matrix to convert
+ *   to and from R'G'B'
+ * @GST_VIDEO_MATRIX_MODE_OUTPUT_ONLY: use the output color matrix to convert
+ *   to and from R'G'B'
+ * @GST_VIDEO_MATRIX_MODE_NONE: disable color matrix conversion.
+ *
+ * Different color matrix conversion modes
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_MATRIX_MODE_FULL,
+ GST_VIDEO_MATRIX_MODE_INPUT_ONLY,
+ GST_VIDEO_MATRIX_MODE_OUTPUT_ONLY,
+ GST_VIDEO_MATRIX_MODE_NONE
+} GstVideoMatrixMode;
+/**
+ * GST_VIDEO_CONVERTER_OPT_MATRIX_MODE:
+ *
+ * #GstVideoMatrixMode, set the color matrix conversion mode for
+ * converting between Y'PbPr and non-linear RGB (R'G'B').
+ * Default is #GST_VIDEO_MATRIX_MODE_FULL.
+ */
+#define GST_VIDEO_CONVERTER_OPT_MATRIX_MODE "GstVideoConverter.matrix-mode"
+/**
+ * GstVideoGammaMode:
+ * @GST_VIDEO_GAMMA_MODE_NONE: disable gamma handling
+ * @GST_VIDEO_GAMMA_MODE_REMAP: convert between input and output gamma
+ * Different gamma conversion modes
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_GAMMA_MODE_NONE,
+ GST_VIDEO_GAMMA_MODE_REMAP
+} GstVideoGammaMode;
+/**
+ * GST_VIDEO_CONVERTER_OPT_GAMMA_MODE:
+ *
+ * #GstVideoGammaMode, set the gamma mode.
+ * Default is #GST_VIDEO_GAMMA_MODE_NONE.
+ */
+#define GST_VIDEO_CONVERTER_OPT_GAMMA_MODE "GstVideoConverter.gamma-mode"
+/**
+ * GstVideoPrimariesMode:
+ * @GST_VIDEO_PRIMARIES_MODE_NONE: disable conversion between primaries
+ * @GST_VIDEO_PRIMARIES_MODE_MERGE_ONLY: do conversion between primaries only
+ * when it can be merged with color matrix conversion.
+ * @GST_VIDEO_PRIMARIES_MODE_FAST: fast conversion between primaries
+ *
+ * Different primaries conversion modes
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_PRIMARIES_MODE_NONE,
+ GST_VIDEO_PRIMARIES_MODE_MERGE_ONLY,
+ GST_VIDEO_PRIMARIES_MODE_FAST
+} GstVideoPrimariesMode;
+/**
+ * GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE:
+ *
+ * #GstVideoPrimariesMode, set the primaries conversion mode.
+ * Default is #GST_VIDEO_PRIMARIES_MODE_NONE.
+ */
+#define GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE "GstVideoConverter.primaries-mode"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_THREADS:
+ *
+ * #G_TYPE_UINT, maximum number of threads to use. Default 1, 0 for the number
+ * of cores.
+ */
+#define GST_VIDEO_CONVERTER_OPT_THREADS "GstVideoConverter.threads"
+
+/**
+ * GST_VIDEO_CONVERTER_OPT_ASYNC_TASKS:
+ *
+ * #G_TYPE_BOOLEAN, whether gst_video_converter_frame() will return immediately
+ * without waiting for the conversion to complete. A subsequent
+ * gst_video_converter_frame_finish() must be performed to ensure completion of the
+ * conversion before subsequent use. Default %FALSE
+ *
+ * Since: 1.20
+ */
+#define GST_VIDEO_CONVERTER_OPT_ASYNC_TASKS "GstVideoConverter.async-tasks"
+
+typedef struct _GstVideoConverter GstVideoConverter;
+
+GST_VIDEO_API
+GstVideoConverter * gst_video_converter_new (const GstVideoInfo *in_info,
+ const GstVideoInfo *out_info,
+ GstStructure *config);
+
+GST_VIDEO_API
+GstVideoConverter * gst_video_converter_new_with_pool (const GstVideoInfo * in_info,
+ const GstVideoInfo * out_info,
+ GstStructure * config,
+ GstTaskPool * pool);
+
+GST_VIDEO_API
+void gst_video_converter_free (GstVideoConverter * convert);
+
+GST_VIDEO_API
+gboolean gst_video_converter_set_config (GstVideoConverter * convert, GstStructure *config);
+
+GST_VIDEO_API
+const GstStructure * gst_video_converter_get_config (GstVideoConverter * convert);
+
+GST_VIDEO_API
+void gst_video_converter_frame (GstVideoConverter * convert,
+ const GstVideoFrame *src, GstVideoFrame *dest);
+GST_VIDEO_API
+void gst_video_converter_frame_finish (GstVideoConverter * convert);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_CONVERTER_H__ */
diff --git a/include/gst/video/video-dither.h b/include/gst/video/video-dither.h
new file mode 100644
index 0000000000..61a696ed32
--- /dev/null
+++ b/include/gst/video/video-dither.h
@@ -0,0 +1,81 @@
+/* GStreamer
+ * Copyright (C) <2014> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_DITHER_H__
+#define __GST_VIDEO_DITHER_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoDitherMethod:
+ * @GST_VIDEO_DITHER_NONE: no dithering
+ * @GST_VIDEO_DITHER_VERTERR: propagate rounding errors downwards
+ * @GST_VIDEO_DITHER_FLOYD_STEINBERG: Dither with floyd-steinberg error diffusion
+ * @GST_VIDEO_DITHER_SIERRA_LITE: Dither with Sierra Lite error diffusion
+ * @GST_VIDEO_DITHER_BAYER: ordered dither using a bayer pattern
+ *
+ * Different dithering methods to use.
+ */
+typedef enum {
+ GST_VIDEO_DITHER_NONE,
+ GST_VIDEO_DITHER_VERTERR,
+ GST_VIDEO_DITHER_FLOYD_STEINBERG,
+ GST_VIDEO_DITHER_SIERRA_LITE,
+ GST_VIDEO_DITHER_BAYER,
+} GstVideoDitherMethod;
+
+/**
+ * GstVideoDitherFlags:
+ * @GST_VIDEO_DITHER_FLAG_NONE: no flags
+ * @GST_VIDEO_DITHER_FLAG_INTERLACED: the input is interlaced
+ * @GST_VIDEO_DITHER_FLAG_QUANTIZE: quantize values in addition to adding dither.
+ *
+ * Extra flags that influence the result from gst_video_dither_new().
+ */
+typedef enum {
+ GST_VIDEO_DITHER_FLAG_NONE = 0,
+ GST_VIDEO_DITHER_FLAG_INTERLACED = (1 << 0),
+ GST_VIDEO_DITHER_FLAG_QUANTIZE = (1 << 1),
+} GstVideoDitherFlags;
+
+typedef struct _GstVideoDither GstVideoDither;
+
+/* circular dependency, need to include this after defining the enums */
+#include <gst/video/video-format.h>
+
+GST_VIDEO_API
+GstVideoDither * gst_video_dither_new (GstVideoDitherMethod method,
+ GstVideoDitherFlags flags,
+ GstVideoFormat format,
+ guint quantizer[GST_VIDEO_MAX_COMPONENTS],
+ guint width);
+
+GST_VIDEO_API
+void gst_video_dither_free (GstVideoDither *dither);
+
+GST_VIDEO_API
+void gst_video_dither_line (GstVideoDither *dither,
+ gpointer line, guint x, guint y, guint width);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_DITHER_H__ */
diff --git a/include/gst/video/video-enumtypes.h b/include/gst/video/video-enumtypes.h
new file mode 100644
index 0000000000..2341679f4c
--- /dev/null
+++ b/include/gst/video/video-enumtypes.h
@@ -0,0 +1,248 @@
+
+/* This file is generated by glib-mkenums, do not modify it. This code is licensed under the same license as the containing project. Note that it links to GLib, so must comply with the LGPL linking clauses. */
+
+#pragma once
+
+ #include <glib-object.h>
+ #include <gst/video/video-prelude.h>
+
+ G_BEGIN_DECLS
+
+/* enumerations from "colorbalance.h" */
+
+GST_VIDEO_API
+GType gst_color_balance_type_get_type (void);
+#define GST_TYPE_COLOR_BALANCE_TYPE (gst_color_balance_type_get_type())
+
+/* enumerations from "gstvideodecoder.h" */
+
+GST_VIDEO_API
+GType gst_video_decoder_request_sync_point_flags_get_type (void);
+#define GST_TYPE_VIDEO_DECODER_REQUEST_SYNC_POINT_FLAGS (gst_video_decoder_request_sync_point_flags_get_type())
+
+/* enumerations from "gstvideometa.h" */
+
+GST_VIDEO_API
+GType gst_video_gl_texture_type_get_type (void);
+#define GST_TYPE_VIDEO_GL_TEXTURE_TYPE (gst_video_gl_texture_type_get_type())
+
+GST_VIDEO_API
+GType gst_video_gl_texture_orientation_get_type (void);
+#define GST_TYPE_VIDEO_GL_TEXTURE_ORIENTATION (gst_video_gl_texture_orientation_get_type())
+
+/* enumerations from "gstvideotimecode.h" */
+
+GST_VIDEO_API
+GType gst_video_time_code_flags_get_type (void);
+#define GST_TYPE_VIDEO_TIME_CODE_FLAGS (gst_video_time_code_flags_get_type())
+
+/* enumerations from "gstvideoutils.h" */
+
+GST_VIDEO_API
+GType gst_video_codec_frame_flags_get_type (void);
+#define GST_TYPE_VIDEO_CODEC_FRAME_FLAGS (gst_video_codec_frame_flags_get_type())
+
+/* enumerations from "navigation.h" */
+
+GST_VIDEO_API
+GType gst_navigation_command_get_type (void);
+#define GST_TYPE_NAVIGATION_COMMAND (gst_navigation_command_get_type())
+
+GST_VIDEO_API
+GType gst_navigation_query_type_get_type (void);
+#define GST_TYPE_NAVIGATION_QUERY_TYPE (gst_navigation_query_type_get_type())
+
+GST_VIDEO_API
+GType gst_navigation_message_type_get_type (void);
+#define GST_TYPE_NAVIGATION_MESSAGE_TYPE (gst_navigation_message_type_get_type())
+
+GST_VIDEO_API
+GType gst_navigation_event_type_get_type (void);
+#define GST_TYPE_NAVIGATION_EVENT_TYPE (gst_navigation_event_type_get_type())
+
+/* enumerations from "video-anc.h" */
+
+GST_VIDEO_API
+GType gst_video_ancillary_did_get_type (void);
+#define GST_TYPE_VIDEO_ANCILLARY_DID (gst_video_ancillary_did_get_type())
+
+GST_VIDEO_API
+GType gst_video_ancillary_di_d16_get_type (void);
+#define GST_TYPE_VIDEO_ANCILLARY_DI_D16 (gst_video_ancillary_di_d16_get_type())
+
+GST_VIDEO_API
+GType gst_video_afd_value_get_type (void);
+#define GST_TYPE_VIDEO_AFD_VALUE (gst_video_afd_value_get_type())
+
+GST_VIDEO_API
+GType gst_video_afd_spec_get_type (void);
+#define GST_TYPE_VIDEO_AFD_SPEC (gst_video_afd_spec_get_type())
+
+GST_VIDEO_API
+GType gst_video_caption_type_get_type (void);
+#define GST_TYPE_VIDEO_CAPTION_TYPE (gst_video_caption_type_get_type())
+
+GST_VIDEO_API
+GType gst_video_vbi_parser_result_get_type (void);
+#define GST_TYPE_VIDEO_VBI_PARSER_RESULT (gst_video_vbi_parser_result_get_type())
+
+/* enumerations from "video-chroma.h" */
+
+GST_VIDEO_API
+GType gst_video_chroma_site_get_type (void);
+#define GST_TYPE_VIDEO_CHROMA_SITE (gst_video_chroma_site_get_type())
+
+GST_VIDEO_API
+GType gst_video_chroma_method_get_type (void);
+#define GST_TYPE_VIDEO_CHROMA_METHOD (gst_video_chroma_method_get_type())
+
+GST_VIDEO_API
+GType gst_video_chroma_flags_get_type (void);
+#define GST_TYPE_VIDEO_CHROMA_FLAGS (gst_video_chroma_flags_get_type())
+
+/* enumerations from "video-color.h" */
+
+GST_VIDEO_API
+GType gst_video_color_range_get_type (void);
+#define GST_TYPE_VIDEO_COLOR_RANGE (gst_video_color_range_get_type())
+
+GST_VIDEO_API
+GType gst_video_color_matrix_get_type (void);
+#define GST_TYPE_VIDEO_COLOR_MATRIX (gst_video_color_matrix_get_type())
+
+GST_VIDEO_API
+GType gst_video_transfer_function_get_type (void);
+#define GST_TYPE_VIDEO_TRANSFER_FUNCTION (gst_video_transfer_function_get_type())
+
+GST_VIDEO_API
+GType gst_video_color_primaries_get_type (void);
+#define GST_TYPE_VIDEO_COLOR_PRIMARIES (gst_video_color_primaries_get_type())
+
+/* enumerations from "video-converter.h" */
+
+GST_VIDEO_API
+GType gst_video_alpha_mode_get_type (void);
+#define GST_TYPE_VIDEO_ALPHA_MODE (gst_video_alpha_mode_get_type())
+
+GST_VIDEO_API
+GType gst_video_chroma_mode_get_type (void);
+#define GST_TYPE_VIDEO_CHROMA_MODE (gst_video_chroma_mode_get_type())
+
+GST_VIDEO_API
+GType gst_video_matrix_mode_get_type (void);
+#define GST_TYPE_VIDEO_MATRIX_MODE (gst_video_matrix_mode_get_type())
+
+GST_VIDEO_API
+GType gst_video_gamma_mode_get_type (void);
+#define GST_TYPE_VIDEO_GAMMA_MODE (gst_video_gamma_mode_get_type())
+
+GST_VIDEO_API
+GType gst_video_primaries_mode_get_type (void);
+#define GST_TYPE_VIDEO_PRIMARIES_MODE (gst_video_primaries_mode_get_type())
+
+/* enumerations from "video-dither.h" */
+
+GST_VIDEO_API
+GType gst_video_dither_method_get_type (void);
+#define GST_TYPE_VIDEO_DITHER_METHOD (gst_video_dither_method_get_type())
+
+GST_VIDEO_API
+GType gst_video_dither_flags_get_type (void);
+#define GST_TYPE_VIDEO_DITHER_FLAGS (gst_video_dither_flags_get_type())
+
+/* enumerations from "video-format.h" */
+
+GST_VIDEO_API
+GType gst_video_format_get_type (void);
+#define GST_TYPE_VIDEO_FORMAT (gst_video_format_get_type())
+
+GST_VIDEO_API
+GType gst_video_format_flags_get_type (void);
+#define GST_TYPE_VIDEO_FORMAT_FLAGS (gst_video_format_flags_get_type())
+
+GST_VIDEO_API
+GType gst_video_pack_flags_get_type (void);
+#define GST_TYPE_VIDEO_PACK_FLAGS (gst_video_pack_flags_get_type())
+
+/* enumerations from "video-frame.h" */
+
+GST_VIDEO_API
+GType gst_video_frame_flags_get_type (void);
+#define GST_TYPE_VIDEO_FRAME_FLAGS (gst_video_frame_flags_get_type())
+
+GST_VIDEO_API
+GType gst_video_buffer_flags_get_type (void);
+#define GST_TYPE_VIDEO_BUFFER_FLAGS (gst_video_buffer_flags_get_type())
+
+GST_VIDEO_API
+GType gst_video_frame_map_flags_get_type (void);
+#define GST_TYPE_VIDEO_FRAME_MAP_FLAGS (gst_video_frame_map_flags_get_type())
+
+/* enumerations from "video-info.h" */
+
+GST_VIDEO_API
+GType gst_video_interlace_mode_get_type (void);
+#define GST_TYPE_VIDEO_INTERLACE_MODE (gst_video_interlace_mode_get_type())
+
+GST_VIDEO_API
+GType gst_video_multiview_mode_get_type (void);
+#define GST_TYPE_VIDEO_MULTIVIEW_MODE (gst_video_multiview_mode_get_type())
+
+GST_VIDEO_API
+GType gst_video_multiview_frame_packing_get_type (void);
+#define GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING (gst_video_multiview_frame_packing_get_type())
+
+GST_VIDEO_API
+GType gst_video_multiview_flags_get_type (void);
+#define GST_TYPE_VIDEO_MULTIVIEW_FLAGS (gst_video_multiview_flags_get_type())
+
+GST_VIDEO_API
+GType gst_video_flags_get_type (void);
+#define GST_TYPE_VIDEO_FLAGS (gst_video_flags_get_type())
+
+GST_VIDEO_API
+GType gst_video_field_order_get_type (void);
+#define GST_TYPE_VIDEO_FIELD_ORDER (gst_video_field_order_get_type())
+
+/* enumerations from "video-overlay-composition.h" */
+
+GST_VIDEO_API
+GType gst_video_overlay_format_flags_get_type (void);
+#define GST_TYPE_VIDEO_OVERLAY_FORMAT_FLAGS (gst_video_overlay_format_flags_get_type())
+
+/* enumerations from "video-resampler.h" */
+
+GST_VIDEO_API
+GType gst_video_resampler_method_get_type (void);
+#define GST_TYPE_VIDEO_RESAMPLER_METHOD (gst_video_resampler_method_get_type())
+
+GST_VIDEO_API
+GType gst_video_resampler_flags_get_type (void);
+#define GST_TYPE_VIDEO_RESAMPLER_FLAGS (gst_video_resampler_flags_get_type())
+
+/* enumerations from "video-scaler.h" */
+
+GST_VIDEO_API
+GType gst_video_scaler_flags_get_type (void);
+#define GST_TYPE_VIDEO_SCALER_FLAGS (gst_video_scaler_flags_get_type())
+
+/* enumerations from "video-tile.h" */
+
+GST_VIDEO_API
+GType gst_video_tile_type_get_type (void);
+#define GST_TYPE_VIDEO_TILE_TYPE (gst_video_tile_type_get_type())
+
+GST_VIDEO_API
+GType gst_video_tile_mode_get_type (void);
+#define GST_TYPE_VIDEO_TILE_MODE (gst_video_tile_mode_get_type())
+
+/* enumerations from "video.h" */
+
+GST_VIDEO_API
+GType gst_video_orientation_method_get_type (void);
+#define GST_TYPE_VIDEO_ORIENTATION_METHOD (gst_video_orientation_method_get_type())
+
+G_END_DECLS
+
+/* Generated data ends here */
+
diff --git a/include/gst/video/video-event.h b/include/gst/video/video-event.h
new file mode 100644
index 0000000000..1fe4e504ca
--- /dev/null
+++ b/include/gst/video/video-event.h
@@ -0,0 +1,69 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_EVENT_H__
+#define __GST_VIDEO_EVENT_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+/* video still frame event creation and parsing */
+
+GST_VIDEO_API
+GstEvent * gst_video_event_new_still_frame (gboolean in_still);
+
+GST_VIDEO_API
+gboolean gst_video_event_parse_still_frame (GstEvent * event, gboolean * in_still);
+
+/* video force key unit event creation and parsing */
+
+GST_VIDEO_API
+GstEvent * gst_video_event_new_downstream_force_key_unit (GstClockTime timestamp,
+ GstClockTime stream_time,
+ GstClockTime running_time,
+ gboolean all_headers,
+ guint count);
+
+GST_VIDEO_API
+gboolean gst_video_event_parse_downstream_force_key_unit (GstEvent * event,
+ GstClockTime * timestamp,
+ GstClockTime * stream_time,
+ GstClockTime * running_time,
+ gboolean * all_headers,
+ guint * count);
+
+GST_VIDEO_API
+GstEvent * gst_video_event_new_upstream_force_key_unit (GstClockTime running_time,
+ gboolean all_headers,
+ guint count);
+
+GST_VIDEO_API
+gboolean gst_video_event_parse_upstream_force_key_unit (GstEvent * event,
+ GstClockTime * running_time,
+ gboolean * all_headers,
+ guint * count);
+
+GST_VIDEO_API
+gboolean gst_video_event_is_force_key_unit(GstEvent *event);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_EVENT_H__ */
diff --git a/include/gst/video/video-format.h b/include/gst/video/video-format.h
new file mode 100644
index 0000000000..9c701c895c
--- /dev/null
+++ b/include/gst/video/video-format.h
@@ -0,0 +1,809 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_FORMAT_H__
+#define __GST_VIDEO_FORMAT_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#include <gst/video/video-enumtypes.h>
+#include <gst/video/video-tile.h>
+
+/**
+ * GstVideoFormat:
+ * @GST_VIDEO_FORMAT_UNKNOWN: Unknown or unset video format id
+ * @GST_VIDEO_FORMAT_ENCODED: Encoded video format. Only ever use that in caps for
+ * special video formats in combination with non-system
+ * memory GstCapsFeatures where it does not make sense
+ * to specify a real video format.
+ * @GST_VIDEO_FORMAT_I420: planar 4:2:0 YUV
+ * @GST_VIDEO_FORMAT_YV12: planar 4:2:0 YVU (like I420 but UV planes swapped)
+ * @GST_VIDEO_FORMAT_YUY2: packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
+ * @GST_VIDEO_FORMAT_UYVY: packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
+ * @GST_VIDEO_FORMAT_VYUY: packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
+ * @GST_VIDEO_FORMAT_AYUV: packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
+ * @GST_VIDEO_FORMAT_RGBx: sparse rgb packed into 32 bit, space last
+ * @GST_VIDEO_FORMAT_BGRx: sparse reverse rgb packed into 32 bit, space last
+ * @GST_VIDEO_FORMAT_xRGB: sparse rgb packed into 32 bit, space first
+ * @GST_VIDEO_FORMAT_xBGR: sparse reverse rgb packed into 32 bit, space first
+ * @GST_VIDEO_FORMAT_RGBA: rgb with alpha channel last
+ * @GST_VIDEO_FORMAT_BGRA: reverse rgb with alpha channel last
+ * @GST_VIDEO_FORMAT_ARGB: rgb with alpha channel first
+ * @GST_VIDEO_FORMAT_ABGR: reverse rgb with alpha channel first
+ * @GST_VIDEO_FORMAT_RGB: RGB packed into 24 bits without padding (`R-G-B-R-G-B`)
+ * @GST_VIDEO_FORMAT_BGR: reverse RGB packed into 24 bits without padding (`B-G-R-B-G-R`)
+ * @GST_VIDEO_FORMAT_Y41B: planar 4:1:1 YUV
+ * @GST_VIDEO_FORMAT_Y42B: planar 4:2:2 YUV
+ * @GST_VIDEO_FORMAT_YVYU: packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
+ * @GST_VIDEO_FORMAT_Y444: planar 4:4:4 YUV
+ * @GST_VIDEO_FORMAT_v210: packed 4:2:2 10-bit YUV, complex format
+ * @GST_VIDEO_FORMAT_v216: packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
+ * @GST_VIDEO_FORMAT_NV12: planar 4:2:0 YUV with interleaved UV plane
+ * @GST_VIDEO_FORMAT_NV21: planar 4:2:0 YUV with interleaved VU plane
+ * @GST_VIDEO_FORMAT_NV12_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV12, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
+ * @GST_VIDEO_FORMAT_GRAY8: 8-bit grayscale
+ * @GST_VIDEO_FORMAT_GRAY10_LE32: 10-bit grayscale, packed into 32bit words (2 bits padding) (Since: 1.14)
+ * @GST_VIDEO_FORMAT_GRAY16_BE: 16-bit grayscale, most significant byte first
+ * @GST_VIDEO_FORMAT_GRAY16_LE: 16-bit grayscale, least significant byte first
+ * @GST_VIDEO_FORMAT_v308: packed 4:4:4 YUV (Y-U-V ...)
+ * @GST_VIDEO_FORMAT_IYU2: packed 4:4:4 YUV (U-Y-V ...) (Since: 1.10)
+ * @GST_VIDEO_FORMAT_RGB16: rgb 5-6-5 bits per component
+ * @GST_VIDEO_FORMAT_BGR16: reverse rgb 5-6-5 bits per component
+ * @GST_VIDEO_FORMAT_RGB15: rgb 5-5-5 bits per component
+ * @GST_VIDEO_FORMAT_BGR15: reverse rgb 5-5-5 bits per component
+ * @GST_VIDEO_FORMAT_UYVP: packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
+ * @GST_VIDEO_FORMAT_A420: planar 4:4:2:0 AYUV
+ * @GST_VIDEO_FORMAT_RGB8P: 8-bit paletted RGB
+ * @GST_VIDEO_FORMAT_YUV9: planar 4:1:0 YUV
+ * @GST_VIDEO_FORMAT_YVU9: planar 4:1:0 YUV (like YUV9 but UV planes swapped)
+ * @GST_VIDEO_FORMAT_IYU1: packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
+ * @GST_VIDEO_FORMAT_ARGB64: rgb with alpha channel first, 16 bits (native endianness) per channel
+ * @GST_VIDEO_FORMAT_AYUV64: packed 4:4:4 YUV with alpha channel, 16 bits (native endianness) per channel (A0-Y0-U0-V0 ...)
+ * @GST_VIDEO_FORMAT_r210: packed 4:4:4 RGB, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I420_10BE: planar 4:2:0 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I420_10LE: planar 4:2:0 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I422_10BE: planar 4:2:2 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I422_10LE: planar 4:2:2 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_Y444_10BE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_Y444_10LE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_GBR: planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_GBR_10BE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_GBR_10LE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_NV16: planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
+ * @GST_VIDEO_FORMAT_NV16_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV16, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
+ * @GST_VIDEO_FORMAT_NV24: planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
+ * @GST_VIDEO_FORMAT_NV12_64Z32: NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
+ * @GST_VIDEO_FORMAT_A420_10BE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A420_10LE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A422_10BE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A422_10LE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A444_10BE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A444_10LE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_NV61: planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
+ * @GST_VIDEO_FORMAT_P010_10BE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
+ * @GST_VIDEO_FORMAT_P010_10LE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
+ * @GST_VIDEO_FORMAT_GBRA: planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_10BE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_10LE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBR_12BE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBR_12LE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_12BE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_12LE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I420_12BE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I420_12LE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I422_12BE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I422_12LE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_Y444_12BE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_Y444_12LE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_NV12_10LE40: Fully packed variant of NV12_10LE32 (Since: 1.16)
+ * @GST_VIDEO_FORMAT_Y210: packed 4:2:2 YUV, 10 bits per channel (Since: 1.16)
+ * @GST_VIDEO_FORMAT_Y410: packed 4:4:4 YUV, 10 bits per channel(A-V-Y-U...) (Since: 1.16)
+ * @GST_VIDEO_FORMAT_VUYA: packed 4:4:4 YUV with alpha channel (V0-U0-Y0-A0...) (Since: 1.16)
+ * @GST_VIDEO_FORMAT_BGR10A2_LE: packed 4:4:4 RGB with alpha channel(B-G-R-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.16)
+ * @GST_VIDEO_FORMAT_RGB10A2_LE: packed 4:4:4 RGB with alpha channel(R-G-B-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y444_16BE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y444_16LE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P016_BE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P016_LE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P012_BE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P012_LE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y212_BE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y212_LE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y412_BE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y412_LE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_NV12_4L4: NV12 with 4x4 tiles in linear order (Since: 1.18)
+ * @GST_VIDEO_FORMAT_NV12_32L32: NV12 with 32x32 tiles in linear order (Since: 1.18)
+ * @GST_VIDEO_FORMAT_RGBP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
+ * @GST_VIDEO_FORMAT_BGRP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
+ * @GST_VIDEO_FORMAT_AV12: Planar 4:2:0 YUV with interleaved UV plane with alpha as 3rd plane (Since: 1.20)
+ * @GST_VIDEO_FORMAT_ARGB64_LE: RGB with alpha channel first, 16 bits per channel
+ * @GST_VIDEO_FORMAT_ARGB64_BE: RGB with alpha channel first, 16 bits per channel
+ * @GST_VIDEO_FORMAT_RGBA64_LE: RGB with alpha channel last, 16 bits per channel
+ * @GST_VIDEO_FORMAT_RGBA64_BE: RGB with alpha channel last, 16 bits per channel
+ * @GST_VIDEO_FORMAT_BGRA64_LE: reverse RGB with alpha channel last, 16 bits per channel
+ * @GST_VIDEO_FORMAT_BGRA64_BE: reverse RGB with alpha channel last, 16 bits per channel
+ * @GST_VIDEO_FORMAT_ABGR64_LE: reverse RGB with alpha channel first, 16 bits per channel
+ * @GST_VIDEO_FORMAT_ABGR64_BE: reverse RGB with alpha channel first, 16 bits per channel
+ *
+ * Enum value describing the most common video formats.
+ *
+ * See the [GStreamer raw video format design document](https://gstreamer.freedesktop.org/documentation/additional/design/mediatype-video-raw.html#formats)
+ * for details about the layout and packing of these formats in memory.
+ */
+typedef enum {
+ GST_VIDEO_FORMAT_UNKNOWN,
+ GST_VIDEO_FORMAT_ENCODED,
+ GST_VIDEO_FORMAT_I420,
+ GST_VIDEO_FORMAT_YV12,
+ GST_VIDEO_FORMAT_YUY2,
+ GST_VIDEO_FORMAT_UYVY,
+ GST_VIDEO_FORMAT_AYUV,
+ GST_VIDEO_FORMAT_RGBx,
+ GST_VIDEO_FORMAT_BGRx,
+ GST_VIDEO_FORMAT_xRGB,
+ GST_VIDEO_FORMAT_xBGR,
+ GST_VIDEO_FORMAT_RGBA,
+ GST_VIDEO_FORMAT_BGRA,
+ GST_VIDEO_FORMAT_ARGB,
+ GST_VIDEO_FORMAT_ABGR,
+ GST_VIDEO_FORMAT_RGB,
+ GST_VIDEO_FORMAT_BGR,
+ GST_VIDEO_FORMAT_Y41B,
+ GST_VIDEO_FORMAT_Y42B,
+ GST_VIDEO_FORMAT_YVYU,
+ GST_VIDEO_FORMAT_Y444,
+ GST_VIDEO_FORMAT_v210,
+ GST_VIDEO_FORMAT_v216,
+ GST_VIDEO_FORMAT_NV12,
+ GST_VIDEO_FORMAT_NV21,
+ GST_VIDEO_FORMAT_GRAY8,
+ GST_VIDEO_FORMAT_GRAY16_BE,
+ GST_VIDEO_FORMAT_GRAY16_LE,
+ GST_VIDEO_FORMAT_v308,
+ GST_VIDEO_FORMAT_RGB16,
+ GST_VIDEO_FORMAT_BGR16,
+ GST_VIDEO_FORMAT_RGB15,
+ GST_VIDEO_FORMAT_BGR15,
+ GST_VIDEO_FORMAT_UYVP,
+ GST_VIDEO_FORMAT_A420,
+ GST_VIDEO_FORMAT_RGB8P,
+ GST_VIDEO_FORMAT_YUV9,
+ GST_VIDEO_FORMAT_YVU9,
+ GST_VIDEO_FORMAT_IYU1,
+ GST_VIDEO_FORMAT_ARGB64,
+ GST_VIDEO_FORMAT_AYUV64,
+ GST_VIDEO_FORMAT_r210,
+ GST_VIDEO_FORMAT_I420_10BE,
+ GST_VIDEO_FORMAT_I420_10LE,
+ GST_VIDEO_FORMAT_I422_10BE,
+ GST_VIDEO_FORMAT_I422_10LE,
+ GST_VIDEO_FORMAT_Y444_10BE,
+ GST_VIDEO_FORMAT_Y444_10LE,
+ GST_VIDEO_FORMAT_GBR,
+ GST_VIDEO_FORMAT_GBR_10BE,
+ GST_VIDEO_FORMAT_GBR_10LE,
+ GST_VIDEO_FORMAT_NV16,
+ GST_VIDEO_FORMAT_NV24,
+ GST_VIDEO_FORMAT_NV12_64Z32,
+ GST_VIDEO_FORMAT_A420_10BE,
+ GST_VIDEO_FORMAT_A420_10LE,
+ GST_VIDEO_FORMAT_A422_10BE,
+ GST_VIDEO_FORMAT_A422_10LE,
+ GST_VIDEO_FORMAT_A444_10BE,
+ GST_VIDEO_FORMAT_A444_10LE,
+ GST_VIDEO_FORMAT_NV61,
+ GST_VIDEO_FORMAT_P010_10BE,
+ GST_VIDEO_FORMAT_P010_10LE,
+ GST_VIDEO_FORMAT_IYU2,
+ GST_VIDEO_FORMAT_VYUY,
+ GST_VIDEO_FORMAT_GBRA,
+ GST_VIDEO_FORMAT_GBRA_10BE,
+ GST_VIDEO_FORMAT_GBRA_10LE,
+ GST_VIDEO_FORMAT_GBR_12BE,
+ GST_VIDEO_FORMAT_GBR_12LE,
+ GST_VIDEO_FORMAT_GBRA_12BE,
+ GST_VIDEO_FORMAT_GBRA_12LE,
+ GST_VIDEO_FORMAT_I420_12BE,
+ GST_VIDEO_FORMAT_I420_12LE,
+ GST_VIDEO_FORMAT_I422_12BE,
+ GST_VIDEO_FORMAT_I422_12LE,
+ GST_VIDEO_FORMAT_Y444_12BE,
+ GST_VIDEO_FORMAT_Y444_12LE,
+ GST_VIDEO_FORMAT_GRAY10_LE32,
+ GST_VIDEO_FORMAT_NV12_10LE32,
+ GST_VIDEO_FORMAT_NV16_10LE32,
+ GST_VIDEO_FORMAT_NV12_10LE40,
+ GST_VIDEO_FORMAT_Y210,
+ GST_VIDEO_FORMAT_Y410,
+ GST_VIDEO_FORMAT_VUYA,
+ GST_VIDEO_FORMAT_BGR10A2_LE,
+ GST_VIDEO_FORMAT_RGB10A2_LE,
+ GST_VIDEO_FORMAT_Y444_16BE,
+ GST_VIDEO_FORMAT_Y444_16LE,
+ GST_VIDEO_FORMAT_P016_BE,
+ GST_VIDEO_FORMAT_P016_LE,
+ GST_VIDEO_FORMAT_P012_BE,
+ GST_VIDEO_FORMAT_P012_LE,
+ GST_VIDEO_FORMAT_Y212_BE,
+ GST_VIDEO_FORMAT_Y212_LE,
+ GST_VIDEO_FORMAT_Y412_BE,
+ GST_VIDEO_FORMAT_Y412_LE,
+ /**
+ * GST_VIDEO_FORMAT_NV12_4L4:
+ *
+ * NV12 with 4x4 tiles in linear order.
+ *
+ * Since: 1.18
+ */
+ GST_VIDEO_FORMAT_NV12_4L4,
+ /**
+ * GST_VIDEO_FORMAT_NV12_32L32:
+ *
+ * NV12 with 32x32 tiles in linear order.
+ *
+ * Since: 1.18
+ */
+ GST_VIDEO_FORMAT_NV12_32L32,
+
+ /**
+ * GST_VIDEO_FORMAT_RGBP:
+ *
+ * Planar 4:4:4 RGB, R-G-B order
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_RGBP,
+
+ /**
+ * GST_VIDEO_FORMAT_BGRP:
+ *
+ * Planar 4:4:4 RGB, B-G-R order
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_BGRP,
+
+ /**
+ * GST_VIDEO_FORMAT_AV12:
+ *
+ * Planar 4:2:0 YUV with interleaved UV plane with alpha as
+ * 3rd plane.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_AV12,
+
+ /**
+ * GST_VIDEO_FORMAT_ARGB64_LE:
+ *
+ * RGB with alpha channel first, 16 bits (little endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_ARGB64_LE,
+
+ /**
+ * GST_VIDEO_FORMAT_ARGB64_BE:
+ *
+ * RGB with alpha channel first, 16 bits (big endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_ARGB64_BE,
+
+ /**
+ * GST_VIDEO_FORMAT_RGBA64_LE:
+ *
+ * RGB with alpha channel last, 16 bits (little endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_RGBA64_LE,
+
+ /**
+ * GST_VIDEO_FORMAT_RGBA64_BE:
+ *
+ * RGB with alpha channel last, 16 bits (big endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_RGBA64_BE,
+
+ /**
+ * GST_VIDEO_FORMAT_BGRA64_LE:
+ *
+ * Reverse RGB with alpha channel last, 16 bits (little endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_BGRA64_LE,
+
+ /**
+ * GST_VIDEO_FORMAT_BGRA64_BE:
+ *
+ * Reverse RGB with alpha channel last, 16 bits (big endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_BGRA64_BE,
+
+ /**
+ * GST_VIDEO_FORMAT_ABGR64_LE:
+ *
+ * Reverse RGB with alpha channel first, 16 bits (little endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_ABGR64_LE,
+
+ /**
+ * GST_VIDEO_FORMAT_ABGR64_BE:
+ *
+ * Reverse RGB with alpha channel first, 16 bits (big endian)
+ * per channel.
+ *
+ * Since: 1.20
+ */
+ GST_VIDEO_FORMAT_ABGR64_BE,
+} GstVideoFormat;
+
+#define GST_VIDEO_MAX_PLANES 4
+#define GST_VIDEO_MAX_COMPONENTS 4
+
+typedef struct _GstVideoFormatInfo GstVideoFormatInfo;
+
+/**
+ * GstVideoFormatFlags:
+ * @GST_VIDEO_FORMAT_FLAG_YUV: The video format is YUV, components are numbered
+ * 0=Y, 1=U, 2=V.
+ * @GST_VIDEO_FORMAT_FLAG_RGB: The video format is RGB, components are numbered
+ * 0=R, 1=G, 2=B.
+ * @GST_VIDEO_FORMAT_FLAG_GRAY: The video is gray, there is one gray component
+ * with index 0.
+ * @GST_VIDEO_FORMAT_FLAG_ALPHA: The video format has an alpha component with
+ * the number 3.
+ * @GST_VIDEO_FORMAT_FLAG_LE: The video format has data stored in little
+ * endianness.
+ * @GST_VIDEO_FORMAT_FLAG_PALETTE: The video format has a palette. The palette
+ * is stored in the second plane and indexes are stored in the first plane.
+ * @GST_VIDEO_FORMAT_FLAG_COMPLEX: The video format has a complex layout that
+ * can't be described with the usual information in the #GstVideoFormatInfo.
+ * @GST_VIDEO_FORMAT_FLAG_UNPACK: This format can be used in a
+ * #GstVideoFormatUnpack and #GstVideoFormatPack function.
+ * @GST_VIDEO_FORMAT_FLAG_TILED: The format is tiled, there is tiling information
+ * in the last plane.
+ *
+ * The different video flags that a format info can have.
+ */
+typedef enum
+{
+ GST_VIDEO_FORMAT_FLAG_YUV = (1 << 0),
+ GST_VIDEO_FORMAT_FLAG_RGB = (1 << 1),
+ GST_VIDEO_FORMAT_FLAG_GRAY = (1 << 2),
+ GST_VIDEO_FORMAT_FLAG_ALPHA = (1 << 3),
+ GST_VIDEO_FORMAT_FLAG_LE = (1 << 4),
+ GST_VIDEO_FORMAT_FLAG_PALETTE = (1 << 5),
+ GST_VIDEO_FORMAT_FLAG_COMPLEX = (1 << 6),
+ GST_VIDEO_FORMAT_FLAG_UNPACK = (1 << 7),
+ GST_VIDEO_FORMAT_FLAG_TILED = (1 << 8)
+} GstVideoFormatFlags;
+
+/* YUV components */
+#define GST_VIDEO_COMP_Y 0
+#define GST_VIDEO_COMP_U 1
+#define GST_VIDEO_COMP_V 2
+
+/* RGB components */
+#define GST_VIDEO_COMP_R 0
+#define GST_VIDEO_COMP_G 1
+#define GST_VIDEO_COMP_B 2
+
+/* alpha component */
+#define GST_VIDEO_COMP_A 3
+
+/* palette components */
+#define GST_VIDEO_COMP_INDEX 0
+#define GST_VIDEO_COMP_PALETTE 1
+
+#include <gst/video/video-chroma.h>
+
+/**
+ * GstVideoPackFlags:
+ * @GST_VIDEO_PACK_FLAG_NONE: No flag
+ * @GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE: When the source has a smaller depth
+ * than the target format, set the least significant bits of the target
+ * to 0. This is likely slightly faster but less accurate. When this flag
+ * is not specified, the most significant bits of the source are duplicated
+ * in the least significant bits of the destination.
+ * @GST_VIDEO_PACK_FLAG_INTERLACED: The source is interlaced. The unpacked
+ * format will be interlaced as well with each line containing
+ * information from alternating fields. (Since: 1.2)
+ *
+ * The different flags that can be used when packing and unpacking.
+ */
+typedef enum
+{
+ GST_VIDEO_PACK_FLAG_NONE = 0,
+ GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE = (1 << 0),
+ GST_VIDEO_PACK_FLAG_INTERLACED = (1 << 1)
+} GstVideoPackFlags;
+
+/**
+ * GstVideoFormatUnpack:
+ * @info: a #GstVideoFormatInfo
+ * @flags: flags to control the unpacking
+ * @dest: a destination array
+ * @data: pointers to the data planes
+ * @stride: strides of the planes
+ * @x: the x position in the image to start from
+ * @y: the y position in the image to start from
+ * @width: the amount of pixels to unpack.
+ *
+ * Unpacks @width pixels from the given planes and strides containing data of
+ * format @info. The pixels will be unpacked into @dest with each component
+ * interleaved as per @info's unpack_format, which will usually be one of
+ * #GST_VIDEO_FORMAT_ARGB, #GST_VIDEO_FORMAT_AYUV, #GST_VIDEO_FORMAT_ARGB64 or
+ * #GST_VIDEO_FORMAT_AYUV64 depending on the format to unpack.
+ * @dest should at least be big enough to hold @width * bytes_per_pixel bytes
+ * where bytes_per_pixel relates to the unpack format and will usually be
+ * either 4 or 8 depending on the unpack format. bytes_per_pixel will be
+ * the same as the pixel stride for plane 0 for the above formats.
+ *
+ * For subsampled formats, the components will be duplicated in the destination
+ * array. Reconstruction of the missing components can be performed in a
+ * separate step after unpacking.
+ */
+typedef void (*GstVideoFormatUnpack) (const GstVideoFormatInfo *info,
+ GstVideoPackFlags flags, gpointer dest,
+ const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES],
+ gint x, gint y, gint width);
+/**
+ * GstVideoFormatPack:
+ * @info: a #GstVideoFormatInfo
+ * @flags: flags to control the packing
+ * @src: a source array
+ * @sstride: the source array stride
+ * @data: pointers to the destination data planes
+ * @stride: strides of the destination planes
+ * @chroma_site: the chroma siting of the target when subsampled (not used)
+ * @y: the y position in the image to pack to
+ * @width: the amount of pixels to pack.
+ *
+ * Packs @width pixels from @src to the given planes and strides in the
+ * format @info. The pixels from source have each component interleaved
+ * and will be packed into the planes in @data.
+ *
+ * This function operates on pack_lines lines, meaning that @src should
+ * contain at least pack_lines lines with a stride of @sstride and @y
+ * should be a multiple of pack_lines.
+ *
+ * Subsampled formats will use the horizontally and vertically cosited
+ * component from the source. Subsampling should be performed before
+ * packing.
+ *
+ * Because this function does not have a x coordinate, it is not possible to
+ * pack pixels starting from an unaligned position. For tiled images this
+ * means that packing should start from a tile coordinate. For subsampled
+ * formats this means that a complete pixel needs to be packed.
+ */
+/* FIXME(2.0): remove the chroma_site, it is unused and is not relevant for
+ * packing, chroma subsampling based on chroma-site should be done in a separate
+ * step before packing */
+typedef void (*GstVideoFormatPack) (const GstVideoFormatInfo *info,
+ GstVideoPackFlags flags,
+ const gpointer src, gint sstride,
+ gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES],
+ GstVideoChromaSite chroma_site,
+ gint y, gint width);
+
+/**
+ * GstVideoFormatInfo:
+ * @format: #GstVideoFormat
+ * @name: string representation of the format
+ * @description: user-readable description of the format
+ * @flags: #GstVideoFormatFlags
+ * @bits: The number of bits used to pack data items. This can be less than 8
+ * when multiple pixels are stored in a byte. for values > 8 multiple bytes
+ * should be read according to the endianness flag before applying the shift
+ * and mask.
+ * @n_components: the number of components in the video format.
+ * @shift: the number of bits to shift away to get the component data
+ * @depth: the depth in bits for each component
+ * @pixel_stride: the pixel stride of each component. This is the amount of
+ * bytes to the pixel immediately to the right. When bits < 8, the stride is
+ * expressed in bits. For 24-bit RGB, this would be 3 bytes, for example,
+ * while it would be 4 bytes for RGBx or ARGB.
+ * @n_planes: the number of planes for this format. The number of planes can be
+ * less than the amount of components when multiple components are packed into
+ * one plane.
+ * @plane: the plane number where a component can be found
+ * @poffset: the offset in the plane where the first pixel of the components
+ * can be found.
+ * @w_sub: subsampling factor of the width for the component. Use
+ * GST_VIDEO_SUB_SCALE to scale a width.
+ * @h_sub: subsampling factor of the height for the component. Use
+ * GST_VIDEO_SUB_SCALE to scale a height.
+ * @unpack_format: the format of the unpacked pixels. This format must have the
+ * #GST_VIDEO_FORMAT_FLAG_UNPACK flag set.
+ * @unpack_func: an unpack function for this format
+ * @pack_lines: the amount of lines that will be packed
+ * @pack_func: a pack function for this format
+ * @tile_mode: The tiling mode
+ * @tile_ws: The width of a tile, in bytes, represented as a shift
+ * @tile_hs: The height of a tile, in bytes, represented as a shift
+ *
+ * Information for a video format.
+ */
+struct _GstVideoFormatInfo {
+ GstVideoFormat format;
+ const gchar *name;
+ const gchar *description;
+ GstVideoFormatFlags flags;
+ guint bits;
+ guint n_components;
+ guint shift[GST_VIDEO_MAX_COMPONENTS];
+ guint depth[GST_VIDEO_MAX_COMPONENTS];
+ gint pixel_stride[GST_VIDEO_MAX_COMPONENTS];
+ guint n_planes;
+ guint plane[GST_VIDEO_MAX_COMPONENTS];
+ guint poffset[GST_VIDEO_MAX_COMPONENTS];
+ guint w_sub[GST_VIDEO_MAX_COMPONENTS];
+ guint h_sub[GST_VIDEO_MAX_COMPONENTS];
+
+ GstVideoFormat unpack_format;
+ GstVideoFormatUnpack unpack_func;
+ gint pack_lines;
+ GstVideoFormatPack pack_func;
+
+ GstVideoTileMode tile_mode;
+ guint tile_ws;
+ guint tile_hs;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+#define GST_VIDEO_FORMAT_INFO_FORMAT(info) ((info)->format)
+#define GST_VIDEO_FORMAT_INFO_NAME(info) ((info)->name)
+#define GST_VIDEO_FORMAT_INFO_FLAGS(info) ((info)->flags)
+
+#define GST_VIDEO_FORMAT_INFO_IS_YUV(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_YUV) != 0)
+#define GST_VIDEO_FORMAT_INFO_IS_RGB(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_RGB) != 0)
+#define GST_VIDEO_FORMAT_INFO_IS_GRAY(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_GRAY) != 0)
+#define GST_VIDEO_FORMAT_INFO_HAS_ALPHA(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_ALPHA) != 0)
+#define GST_VIDEO_FORMAT_INFO_IS_LE(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_LE) != 0)
+#define GST_VIDEO_FORMAT_INFO_HAS_PALETTE(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_PALETTE) != 0)
+#define GST_VIDEO_FORMAT_INFO_IS_COMPLEX(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_COMPLEX) != 0)
+#define GST_VIDEO_FORMAT_INFO_IS_TILED(info) (((info)->flags & GST_VIDEO_FORMAT_FLAG_TILED) != 0)
+
+#define GST_VIDEO_FORMAT_INFO_BITS(info) ((info)->bits)
+#define GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info) ((info)->n_components)
+#define GST_VIDEO_FORMAT_INFO_SHIFT(info,c) ((info)->shift[c])
+#define GST_VIDEO_FORMAT_INFO_DEPTH(info,c) ((info)->depth[c])
+/**
+ * GST_VIDEO_FORMAT_INFO_PSTRIDE:
+ * @info: a #GstVideoFormatInfo
+ * @c: the component index
+ *
+ * pixel stride for the given component. This is the amount of bytes to the
+ * pixel immediately to the right, so basically bytes from one pixel to the
+ * next. When bits < 8, the stride is expressed in bits.
+ *
+ * Examples: for 24-bit RGB, the pixel stride would be 3 bytes, while it
+ * would be 4 bytes for RGBx or ARGB, and 8 bytes for ARGB64 or AYUV64.
+ * For planar formats such as I420 the pixel stride is usually 1. For
+ * YUY2 it would be 2 bytes.
+ */
+#define GST_VIDEO_FORMAT_INFO_PSTRIDE(info,c) ((info)->pixel_stride[c])
+/**
+ * GST_VIDEO_FORMAT_INFO_N_PLANES:
+ * @info: a #GstVideoFormatInfo
+ *
+ * Number of planes. This is the number of planes the pixel layout is
+ * organized in memory. The number of planes can be less than the
+ * number of components (e.g. Y,U,V,A or R, G, B, A) when multiple
+ * components are packed into one plane.
+ *
+ * Examples: RGB/RGBx/RGBA: 1 plane, 3/3/4 components;
+ * I420: 3 planes, 3 components; NV21/NV12: 2 planes, 3 components.
+ */
+#define GST_VIDEO_FORMAT_INFO_N_PLANES(info) ((info)->n_planes)
+/**
+ * GST_VIDEO_FORMAT_INFO_PLANE:
+ * @info: a #GstVideoFormatInfo
+ * @c: the component index
+ *
+ * Plane number where the given component can be found. A plane may
+ * contain data for multiple components.
+ */
+#define GST_VIDEO_FORMAT_INFO_PLANE(info,c) ((info)->plane[c])
+#define GST_VIDEO_FORMAT_INFO_POFFSET(info,c) ((info)->poffset[c])
+#define GST_VIDEO_FORMAT_INFO_W_SUB(info,c) ((info)->w_sub[c])
+#define GST_VIDEO_FORMAT_INFO_H_SUB(info,c) ((info)->h_sub[c])
+
+/* rounds up */
+#define GST_VIDEO_SUB_SCALE(scale,val) (-((-((gint)(val)))>>(scale)))
+
+#define GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info,c,w) GST_VIDEO_SUB_SCALE ((info)->w_sub[c],(w))
+#define GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info,c,h) GST_VIDEO_SUB_SCALE ((info)->h_sub[c],(h))
+
+#define GST_VIDEO_FORMAT_INFO_DATA(info,planes,comp) \
+ (((guint8*)(planes)[(info)->plane[comp]]) + (info)->poffset[comp])
+/**
+ * GST_VIDEO_FORMAT_INFO_STRIDE:
+ * @info: a #GstVideoFormatInfo
+ * @strides: an array of strides
+ * @comp: the component index
+ *
+ * Row stride in bytes, that is number of bytes from the first pixel component
+ * of a row to the first pixel component in the next row. This might include
+ * some row padding (memory not actually used for anything, to make sure the
+ * beginning of the next row is aligned in a particular way).
+ */
+#define GST_VIDEO_FORMAT_INFO_STRIDE(info,strides,comp) ((strides)[(info)->plane[comp]])
+#define GST_VIDEO_FORMAT_INFO_OFFSET(info,offsets,comp) \
+ (((offsets)[(info)->plane[comp]]) + (info)->poffset[comp])
+
+#define GST_VIDEO_FORMAT_INFO_TILE_MODE(info) ((info)->tile_mode)
+#define GST_VIDEO_FORMAT_INFO_TILE_WS(info) ((info)->tile_ws)
+#define GST_VIDEO_FORMAT_INFO_TILE_HS(info) ((info)->tile_hs)
+
+GST_VIDEO_API
+void gst_video_format_info_component (const GstVideoFormatInfo *info, guint plane, gint components[GST_VIDEO_MAX_COMPONENTS]);
+
+/* format properties */
+
+GST_VIDEO_API
+GstVideoFormat gst_video_format_from_masks (gint depth, gint bpp, gint endianness,
+ guint red_mask, guint green_mask,
+ guint blue_mask, guint alpha_mask) G_GNUC_CONST;
+
+GST_VIDEO_API
+GstVideoFormat gst_video_format_from_fourcc (guint32 fourcc) G_GNUC_CONST;
+
+GST_VIDEO_API
+GstVideoFormat gst_video_format_from_string (const gchar *format) G_GNUC_CONST;
+
+GST_VIDEO_API
+guint32 gst_video_format_to_fourcc (GstVideoFormat format) G_GNUC_CONST;
+
+GST_VIDEO_API
+const gchar * gst_video_format_to_string (GstVideoFormat format) G_GNUC_CONST;
+
+GST_VIDEO_API
+const GstVideoFormatInfo *
+ gst_video_format_get_info (GstVideoFormat format) G_GNUC_CONST;
+
+GST_VIDEO_API
+gconstpointer gst_video_format_get_palette (GstVideoFormat format, gsize *size);
+
+#define GST_VIDEO_SIZE_RANGE "(int) [ 1, max ]"
+#define GST_VIDEO_FPS_RANGE "(fraction) [ 0, max ]"
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+# define GST_VIDEO_NE(s) G_STRINGIFY(s)"_LE"
+# define GST_VIDEO_OE(s) G_STRINGIFY(s)"_BE"
+#else
+# define GST_VIDEO_NE(s) G_STRINGIFY(s)"_BE"
+# define GST_VIDEO_OE(s) G_STRINGIFY(s)"_LE"
+#endif
+
+/**
+ * GST_VIDEO_FORMATS_ALL:
+ *
+ * List of all video formats, for use in template caps strings.
+ *
+ * Formats are sorted by decreasing "quality", using these criteria by priority:
+ * - number of components
+ * - depth
+ * - subsampling factor of the width
+ * - subsampling factor of the height
+ * - number of planes
+ * - native endianness preferred
+ * - pixel stride
+ * - poffset
+ * - prefer non-complex formats
+ * - prefer YUV formats over RGB ones
+ * - prefer I420 over YV12
+ * - format name
+ */
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define GST_VIDEO_FORMATS_ALL "{ ABGR64_BE, BGRA64_BE, AYUV64, ARGB64_BE, ARGB64, " \
+ "RGBA64_BE, ABGR64_LE, BGRA64_LE, ARGB64_LE, RGBA64_LE, GBRA_12BE, GBRA_12LE, Y412_BE, " \
+ "Y412_LE, A444_10BE, GBRA_10BE, A444_10LE, GBRA_10LE, A422_10BE, A422_10LE, " \
+ "A420_10BE, A420_10LE, Y410, RGB10A2_LE, BGR10A2_LE, GBRA, ABGR, VUYA, BGRA, " \
+ "AYUV, ARGB, RGBA, A420, AV12, Y444_16BE, Y444_16LE, v216, P016_BE, P016_LE, Y444_12BE, " \
+ "GBR_12BE, Y444_12LE, GBR_12LE, I422_12BE, I422_12LE, Y212_BE, Y212_LE, I420_12BE, " \
+ "I420_12LE, P012_BE, P012_LE, Y444_10BE, GBR_10BE, Y444_10LE, GBR_10LE, r210, " \
+ "I422_10BE, I422_10LE, NV16_10LE32, Y210, v210, UYVP, I420_10BE, I420_10LE, " \
+ "P010_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, " \
+ "xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, " \
+ "YV12, NV21, NV12, NV12_64Z32, NV12_4L4, NV12_32L32, Y41B, IYU1, YVU9, YUV9, RGB16, " \
+ "BGR16, RGB15, BGR15, RGB8P, GRAY16_BE, GRAY16_LE, GRAY10_LE32, GRAY8 }"
+#elif G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define GST_VIDEO_FORMATS_ALL "{ ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, " \
+ "RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, " \
+ "Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, " \
+ "A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, " \
+ "AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, " \
+ "GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, " \
+ "I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, " \
+ "I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, " \
+ "P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, " \
+ "xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, " \
+ "YV12, NV21, NV12, NV12_64Z32, NV12_4L4, NV12_32L32, Y41B, IYU1, YVU9, YUV9, RGB16, " \
+ "BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }"
+#endif
+
+GST_VIDEO_API
+const GstVideoFormat * gst_video_formats_raw (guint * len);
+
+/**
+ * GST_VIDEO_CAPS_MAKE:
+ * @format: string format that describes the pixel layout, as string
+ * (e.g. "I420", "RGB", "YV12", "YUY2", "AYUV", etc.)
+ *
+ * Generic caps string for video, for use in pad templates.
+ */
+#define GST_VIDEO_CAPS_MAKE(format) \
+ "video/x-raw, " \
+ "format = (string) " format ", " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+/**
+ * GST_VIDEO_CAPS_MAKE_WITH_FEATURES:
+ * @format: string format that describes the pixel layout, as string
+ * (e.g. "I420", "RGB", "YV12", "YUY2", "AYUV", etc.)
+ * @features: Requires caps features as a string, e.g.
+ * "memory:SystemMemory".
+ *
+ * Generic caps string for video, for use in pad templates.
+ *
+ * Since: 1.2
+ */
+#define GST_VIDEO_CAPS_MAKE_WITH_FEATURES(features,format) \
+ "video/x-raw(" features "), " \
+ "format = (string) " format ", " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+GST_VIDEO_API
+GstCaps * gst_video_make_raw_caps (const GstVideoFormat formats[], guint len);
+
+GST_VIDEO_API
+GstCaps * gst_video_make_raw_caps_with_features (const GstVideoFormat formats[], guint len,
+ GstCapsFeatures * features);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_FORMAT_H__ */
diff --git a/include/gst/video/video-frame.h b/include/gst/video/video-frame.h
new file mode 100644
index 0000000000..f3925c47de
--- /dev/null
+++ b/include/gst/video/video-frame.h
@@ -0,0 +1,254 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_FRAME_H__
+#define __GST_VIDEO_FRAME_H__
+
+#include <gst/video/video-enumtypes.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoFrame GstVideoFrame;
+
+/**
+ * GstVideoFrameFlags:
+ * @GST_VIDEO_FRAME_FLAG_NONE: no flags
+ * @GST_VIDEO_FRAME_FLAG_INTERLACED: The video frame is interlaced. In mixed
+ * interlace-mode, this flag specifies if the frame is interlaced or
+ * progressive.
+ * @GST_VIDEO_FRAME_FLAG_TFF: The video frame has the top field first
+ * @GST_VIDEO_FRAME_FLAG_RFF: The video frame has the repeat flag
+ * @GST_VIDEO_FRAME_FLAG_ONEFIELD: The video frame has one field
+ * @GST_VIDEO_FRAME_FLAG_MULTIPLE_VIEW: The video contains one or
+ * more non-mono views
+ * @GST_VIDEO_FRAME_FLAG_FIRST_IN_BUNDLE: The video frame is the first
+ * in a set of corresponding views provided as sequential frames.
+ * @GST_VIDEO_FRAME_FLAG_TOP_FIELD: The video frame has the top field only. This
+ * is the same as GST_VIDEO_FRAME_FLAG_TFF | GST_VIDEO_FRAME_FLAG_ONEFIELD
+ * (Since: 1.16).
+ * @GST_VIDEO_FRAME_FLAG_BOTTOM_FIELD: The video frame has the bottom field
+ * only. This is the same as GST_VIDEO_FRAME_FLAG_ONEFIELD
+ * (GST_VIDEO_FRAME_FLAG_TFF flag unset) (Since: 1.16).
+ *
+ * Extra video frame flags
+ */
+typedef enum {
+ GST_VIDEO_FRAME_FLAG_NONE = 0,
+ GST_VIDEO_FRAME_FLAG_INTERLACED = (1 << 0),
+ GST_VIDEO_FRAME_FLAG_TFF = (1 << 1),
+ GST_VIDEO_FRAME_FLAG_RFF = (1 << 2),
+ GST_VIDEO_FRAME_FLAG_ONEFIELD = (1 << 3),
+ GST_VIDEO_FRAME_FLAG_MULTIPLE_VIEW = (1 << 4),
+ GST_VIDEO_FRAME_FLAG_FIRST_IN_BUNDLE = (1 << 5),
+ GST_VIDEO_FRAME_FLAG_TOP_FIELD = GST_VIDEO_FRAME_FLAG_TFF |
+ GST_VIDEO_FRAME_FLAG_ONEFIELD,
+ GST_VIDEO_FRAME_FLAG_BOTTOM_FIELD = GST_VIDEO_FRAME_FLAG_ONEFIELD,
+} GstVideoFrameFlags;
+
+/* circular dependency, need to include this after defining the enums */
+#include <gst/video/video-format.h>
+#include <gst/video/video-info.h>
+
+/**
+ * GstVideoFrame:
+ * @info: the #GstVideoInfo
+ * @flags: #GstVideoFrameFlags for the frame
+ * @buffer: the mapped buffer
+ * @meta: pointer to metadata if any
+ * @id: id of the mapped frame. the id can for example be used to
+ * identify the frame in case of multiview video.
+ * @data: pointers to the plane data
+ * @map: mappings of the planes
+ *
+ * A video frame obtained from gst_video_frame_map()
+ */
+struct _GstVideoFrame {
+ GstVideoInfo info;
+ GstVideoFrameFlags flags;
+
+ GstBuffer *buffer;
+ gpointer meta;
+ gint id;
+
+ gpointer data[GST_VIDEO_MAX_PLANES];
+ GstMapInfo map[GST_VIDEO_MAX_PLANES];
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+gboolean gst_video_frame_map (GstVideoFrame *frame, const GstVideoInfo *info,
+ GstBuffer *buffer, GstMapFlags flags);
+
+GST_VIDEO_API
+gboolean gst_video_frame_map_id (GstVideoFrame *frame, const GstVideoInfo *info,
+ GstBuffer *buffer, gint id, GstMapFlags flags);
+
+GST_VIDEO_API
+void gst_video_frame_unmap (GstVideoFrame *frame);
+
+GST_VIDEO_API
+gboolean gst_video_frame_copy (GstVideoFrame *dest, const GstVideoFrame *src);
+
+GST_VIDEO_API
+gboolean gst_video_frame_copy_plane (GstVideoFrame *dest, const GstVideoFrame *src,
+ guint plane);
+
+/* general info */
+#define GST_VIDEO_FRAME_FORMAT(f) (GST_VIDEO_INFO_FORMAT(&(f)->info))
+#define GST_VIDEO_FRAME_WIDTH(f) (GST_VIDEO_INFO_WIDTH(&(f)->info))
+#define GST_VIDEO_FRAME_HEIGHT(f) (GST_VIDEO_INFO_HEIGHT(&(f)->info))
+#define GST_VIDEO_FRAME_SIZE(f) (GST_VIDEO_INFO_SIZE(&(f)->info))
+
+/* flags */
+#define GST_VIDEO_FRAME_FLAGS(f) ((f)->flags)
+#define GST_VIDEO_FRAME_FLAG_IS_SET(f,fl) ((GST_VIDEO_FRAME_FLAGS(f) & (fl)) == (fl))
+#define GST_VIDEO_FRAME_IS_INTERLACED(f) (GST_VIDEO_FRAME_FLAG_IS_SET(f, GST_VIDEO_FRAME_FLAG_INTERLACED))
+#define GST_VIDEO_FRAME_IS_TFF(f) (GST_VIDEO_FRAME_FLAG_IS_SET(f, GST_VIDEO_FRAME_FLAG_TFF))
+#define GST_VIDEO_FRAME_IS_RFF(f) (GST_VIDEO_FRAME_FLAG_IS_SET(f, GST_VIDEO_FRAME_FLAG_RFF))
+#define GST_VIDEO_FRAME_IS_ONEFIELD(f) (GST_VIDEO_FRAME_FLAG_IS_SET(f, GST_VIDEO_FRAME_FLAG_ONEFIELD))
+#define GST_VIDEO_FRAME_IS_TOP_FIELD(f) (GST_VIDEO_FRAME_FLAG_IS_SET(f, GST_VIDEO_FRAME_FLAG_TOP_FIELD))
+
+/* GST_VIDEO_FRAME_FLAG_BOTTOM_FIELD is a subset of
+ * GST_VIDEO_FRAME_FLAG_TOP_FIELD so needs to be checked accordingly. */
+#define _GST_VIDEO_FRAME_FLAG_FIELD_MASK GST_VIDEO_FRAME_FLAG_TOP_FIELD
+
+#define GST_VIDEO_FRAME_IS_BOTTOM_FIELD(f) (((f)->flags & _GST_VIDEO_FRAME_FLAG_FIELD_MASK) == GST_VIDEO_FRAME_FLAG_BOTTOM_FIELD)
+
+/* dealing with planes */
+#define GST_VIDEO_FRAME_N_PLANES(f) (GST_VIDEO_INFO_N_PLANES(&(f)->info))
+#define GST_VIDEO_FRAME_PLANE_DATA(f,p) ((f)->data[p])
+#define GST_VIDEO_FRAME_PLANE_OFFSET(f,p) (GST_VIDEO_INFO_PLANE_OFFSET(&(f)->info,(p)))
+#define GST_VIDEO_FRAME_PLANE_STRIDE(f,p) (GST_VIDEO_INFO_PLANE_STRIDE(&(f)->info,(p)))
+
+/* dealing with components */
+#define GST_VIDEO_FRAME_N_COMPONENTS(f) GST_VIDEO_INFO_N_COMPONENTS(&(f)->info)
+#define GST_VIDEO_FRAME_COMP_DEPTH(f,c) GST_VIDEO_INFO_COMP_DEPTH(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_DATA(f,c) GST_VIDEO_INFO_COMP_DATA(&(f)->info,(f)->data,(c))
+#define GST_VIDEO_FRAME_COMP_STRIDE(f,c) GST_VIDEO_INFO_COMP_STRIDE(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_OFFSET(f,c) GST_VIDEO_INFO_COMP_OFFSET(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_WIDTH(f,c) GST_VIDEO_INFO_COMP_WIDTH(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_HEIGHT(f,c) GST_VIDEO_INFO_COMP_HEIGHT(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_PLANE(f,c) GST_VIDEO_INFO_COMP_PLANE(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_PSTRIDE(f,c) GST_VIDEO_INFO_COMP_PSTRIDE(&(f)->info,(c))
+#define GST_VIDEO_FRAME_COMP_POFFSET(f,c) GST_VIDEO_INFO_COMP_POFFSET(&(f)->info,(c))
+
+/* buffer flags */
+
+/**
+ * GstVideoBufferFlags:
+ * @GST_VIDEO_BUFFER_FLAG_INTERLACED: If the #GstBuffer is interlaced. In mixed
+ *                                     interlace-mode, this flag specifies if the frame is
+ * interlaced or progressive.
+ * @GST_VIDEO_BUFFER_FLAG_TFF: If the #GstBuffer is interlaced, then the first field
+ * in the video frame is the top field. If unset, the
+ * bottom field is first.
+ * @GST_VIDEO_BUFFER_FLAG_RFF: If the #GstBuffer is interlaced, then the first field
+ * (as defined by the %GST_VIDEO_BUFFER_FLAG_TFF flag setting)
+ * is repeated.
+ * @GST_VIDEO_BUFFER_FLAG_ONEFIELD: If the #GstBuffer is interlaced, then only the
+ * first field (as defined by the %GST_VIDEO_BUFFER_FLAG_TFF
+ * flag setting) is to be displayed (Since: 1.16).
+ * @GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW: The #GstBuffer contains one or more specific views,
+ *                                     such as left or right eye view. This flag is set on
+ * any buffer that contains non-mono content - even for
+ * streams that contain only a single viewpoint. In mixed
+ * mono / non-mono streams, the absence of the flag marks
+ * mono buffers.
+ * @GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE: When conveying stereo/multiview content with
+ * frame-by-frame methods, this flag marks the first buffer
+ * in a bundle of frames that belong together.
+ * @GST_VIDEO_BUFFER_FLAG_TOP_FIELD: The video frame has the top field only. This is the
+ * same as GST_VIDEO_BUFFER_FLAG_TFF |
+ * GST_VIDEO_BUFFER_FLAG_ONEFIELD (Since: 1.16).
+ * Use GST_VIDEO_BUFFER_IS_TOP_FIELD() to check for this flag.
+ * @GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD: The video frame has the bottom field only. This is
+ * the same as GST_VIDEO_BUFFER_FLAG_ONEFIELD
+ * (GST_VIDEO_BUFFER_FLAG_TFF flag unset) (Since: 1.16).
+ * Use GST_VIDEO_BUFFER_IS_BOTTOM_FIELD() to check for this flag.
+ * @GST_VIDEO_BUFFER_FLAG_MARKER: The #GstBuffer contains the end of a video field or frame
+ * boundary such as the last subframe or packet (Since: 1.18).
+ * @GST_VIDEO_BUFFER_FLAG_LAST: Offset to define more flags
+ *
+ * Additional video buffer flags. These flags can potentially be used on any
+ * buffers carrying closed caption data, or video data - even encoded data.
+ *
+ * Note that these are only valid for #GstCaps of type: video/... and caption/...
+ * They can conflict with other extended buffer flags.
+ */
+typedef enum {
+ GST_VIDEO_BUFFER_FLAG_INTERLACED = (GST_BUFFER_FLAG_LAST << 0),
+ GST_VIDEO_BUFFER_FLAG_TFF = (GST_BUFFER_FLAG_LAST << 1),
+ GST_VIDEO_BUFFER_FLAG_RFF = (GST_BUFFER_FLAG_LAST << 2),
+ GST_VIDEO_BUFFER_FLAG_ONEFIELD = (GST_BUFFER_FLAG_LAST << 3),
+
+ GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW = (GST_BUFFER_FLAG_LAST << 4),
+ GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE = (GST_BUFFER_FLAG_LAST << 5),
+
+ GST_VIDEO_BUFFER_FLAG_TOP_FIELD = GST_VIDEO_BUFFER_FLAG_TFF |
+ GST_VIDEO_BUFFER_FLAG_ONEFIELD,
+ GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD = GST_VIDEO_BUFFER_FLAG_ONEFIELD,
+
+ GST_VIDEO_BUFFER_FLAG_MARKER = GST_BUFFER_FLAG_MARKER,
+
+ GST_VIDEO_BUFFER_FLAG_LAST = (GST_BUFFER_FLAG_LAST << 8)
+} GstVideoBufferFlags;
+
+/* GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD is a subset of
+ * GST_VIDEO_BUFFER_FLAG_TOP_FIELD so needs to be checked accordingly. */
+#define _GST_VIDEO_BUFFER_FLAG_FIELD_MASK GST_VIDEO_BUFFER_FLAG_TOP_FIELD
+
+/**
+ * GST_VIDEO_BUFFER_IS_TOP_FIELD:
+ * @buf: a #GstBuffer
+ *
+ * Check if GST_VIDEO_BUFFER_FLAG_TOP_FIELD is set on @buf (Since: 1.18).
+ */
+#define GST_VIDEO_BUFFER_IS_TOP_FIELD(buf) ((GST_BUFFER_FLAGS (buf) & _GST_VIDEO_BUFFER_FLAG_FIELD_MASK) == GST_VIDEO_BUFFER_FLAG_TOP_FIELD)
+
+/**
+ * GST_VIDEO_BUFFER_IS_BOTTOM_FIELD:
+ * @buf: a #GstBuffer
+ *
+ * Check if GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD is set on @buf (Since: 1.18).
+ */
+#define GST_VIDEO_BUFFER_IS_BOTTOM_FIELD(buf) ((GST_BUFFER_FLAGS (buf) & _GST_VIDEO_BUFFER_FLAG_FIELD_MASK) == GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD)
+
+/**
+ * GstVideoFrameMapFlags:
+ * @GST_VIDEO_FRAME_MAP_FLAG_NO_REF: Don't take another reference of the buffer and store it in
+ * the GstVideoFrame. This makes sure that the buffer stays
+ * writable while the frame is mapped, but requires that the
+ * buffer reference stays valid until the frame is unmapped again.
+ * @GST_VIDEO_FRAME_MAP_FLAG_LAST: Offset to define more flags
+ *
+ * Additional mapping flags for gst_video_frame_map().
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_FRAME_MAP_FLAG_NO_REF = (GST_MAP_FLAG_LAST << 0),
+ GST_VIDEO_FRAME_MAP_FLAG_LAST = (GST_MAP_FLAG_LAST << 8)
+ /* 8 more flags possible afterwards */
+} GstVideoFrameMapFlags;
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_FRAME_H__ */
diff --git a/include/gst/video/video-hdr.h b/include/gst/video/video-hdr.h
new file mode 100644
index 0000000000..b6ce360233
--- /dev/null
+++ b/include/gst/video/video-hdr.h
@@ -0,0 +1,143 @@
+/* GStreamer
+ * Copyright (C) <2018-2019> Seungha Yang <seungha.yang@navercorp.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_HDR_H__
+#define __GST_VIDEO_HDR_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoMasteringDisplayInfoCoordinates GstVideoMasteringDisplayInfoCoordinates;
+typedef struct _GstVideoMasteringDisplayInfo GstVideoMasteringDisplayInfo;
+typedef struct _GstVideoContentLightLevel GstVideoContentLightLevel;
+
+/**
+ * GstVideoMasteringDisplayInfoCoordinates:
+ * @x: the x coordinate of CIE 1931 color space in unit of 0.00002.
+ * @y: the y coordinate of CIE 1931 color space in unit of 0.00002.
+ *
+ * Used to represent display_primaries and white_point of
+ * #GstVideoMasteringDisplayInfo struct. See #GstVideoMasteringDisplayInfo
+ *
+ * Since: 1.18
+ */
+struct _GstVideoMasteringDisplayInfoCoordinates
+{
+ guint16 x;
+ guint16 y;
+};
+
+/**
+ * GstVideoMasteringDisplayInfo:
+ * @display_primaries: the xy coordinates of primaries in the CIE 1931 color space.
+ * the index 0 contains red, 1 is for green and 2 is for blue.
+ * each value is normalized to 50000 (meaning that in unit of 0.00002)
+ * @white_point: the xy coordinates of white point in the CIE 1931 color space.
+ * each value is normalized to 50000 (meaning that in unit of 0.00002)
+ * @max_display_mastering_luminance: the maximum value of display luminance
+ * in unit of 0.0001 candelas per square metre (cd/m^2 and nit)
+ * @min_display_mastering_luminance: the minimum value of display luminance
+ * in unit of 0.0001 candelas per square metre (cd/m^2 and nit)
+ *
+ * Mastering display color volume information defined by SMPTE ST 2086
+ * (a.k.a static HDR metadata).
+ *
+ * Since: 1.18
+ */
+struct _GstVideoMasteringDisplayInfo
+{
+ GstVideoMasteringDisplayInfoCoordinates display_primaries[3];
+ GstVideoMasteringDisplayInfoCoordinates white_point;
+ guint32 max_display_mastering_luminance;
+ guint32 min_display_mastering_luminance;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+void gst_video_mastering_display_info_init (GstVideoMasteringDisplayInfo * minfo);
+
+GST_VIDEO_API
+gboolean gst_video_mastering_display_info_from_string (GstVideoMasteringDisplayInfo * minfo,
+ const gchar * mastering);
+
+GST_VIDEO_API
+gchar * gst_video_mastering_display_info_to_string (const GstVideoMasteringDisplayInfo * minfo);
+
+GST_VIDEO_API
+gboolean gst_video_mastering_display_info_is_equal (const GstVideoMasteringDisplayInfo * minfo,
+ const GstVideoMasteringDisplayInfo * other);
+
+GST_VIDEO_API
+gboolean gst_video_mastering_display_info_from_caps (GstVideoMasteringDisplayInfo * minfo,
+ const GstCaps * caps);
+
+GST_VIDEO_API
+gboolean gst_video_mastering_display_info_add_to_caps (const GstVideoMasteringDisplayInfo * minfo,
+ GstCaps * caps);
+
+/**
+ * GstVideoContentLightLevel:
+ * @max_content_light_level: the maximum content light level
+ * (abbreviated to MaxCLL) in candelas per square meter (cd/m^2 and nit)
+ * @max_frame_average_light_level: the maximum frame average light level
+ * (abbreviated to MaxFLL) in candelas per square meter (cd/m^2 and nit)
+ *
+ * Content light level information specified in CEA-861.3, Appendix A.
+ *
+ * Since: 1.18
+ */
+struct _GstVideoContentLightLevel
+{
+ guint16 max_content_light_level;
+ guint16 max_frame_average_light_level;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_VIDEO_API
+void gst_video_content_light_level_init (GstVideoContentLightLevel * linfo);
+
+GST_VIDEO_API
+gboolean gst_video_content_light_level_from_string (GstVideoContentLightLevel * linfo,
+ const gchar * level);
+
+GST_VIDEO_API
+gchar * gst_video_content_light_level_to_string (const GstVideoContentLightLevel * linfo);
+
+GST_VIDEO_API
+gboolean gst_video_content_light_level_is_equal (const GstVideoContentLightLevel * linfo,
+ const GstVideoContentLightLevel * other);
+
+GST_VIDEO_API
+gboolean gst_video_content_light_level_from_caps (GstVideoContentLightLevel * linfo,
+ const GstCaps * caps);
+
+GST_VIDEO_API
+gboolean gst_video_content_light_level_add_to_caps (const GstVideoContentLightLevel * linfo,
+ GstCaps * caps);
+
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_HDR_H__ */
diff --git a/include/gst/video/video-info.h b/include/gst/video/video-info.h
new file mode 100644
index 0000000000..3de617d954
--- /dev/null
+++ b/include/gst/video/video-info.h
@@ -0,0 +1,484 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_INFO_H__
+#define __GST_VIDEO_INFO_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-format.h>
+#include <gst/video/video-color.h>
+
+G_BEGIN_DECLS
+
+#include <gst/video/video-enumtypes.h>
+
+typedef struct _GstVideoInfo GstVideoInfo;
+
+/**
+ * GST_CAPS_FEATURE_FORMAT_INTERLACED:
+ *
+ * Name of the caps feature indicating that the stream is interlaced.
+ *
+ * Currently it is only used for video with 'interlace-mode=alternate'
+ * to ensure backwards compatibility for this new mode.
+ * In this mode each buffer carries a single field of interlaced video.
+ * @GST_VIDEO_BUFFER_FLAG_TOP_FIELD and @GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD
+ * indicate whether the buffer carries a top or bottom field. The order of
+ * buffers/fields in the stream and the timestamps on the buffers indicate the
+ * temporal order of the fields.
+ * Top and bottom fields are expected to alternate in this mode.
+ * The frame rate in the caps still signals the frame rate, so the notional field
+ * rate will be twice the frame rate from the caps
+ * (see @GST_VIDEO_INFO_FIELD_RATE_N).
+ *
+ * Since: 1.16.
+ */
+#define GST_CAPS_FEATURE_FORMAT_INTERLACED "format:Interlaced"
+
+/**
+ * GstVideoInterlaceMode:
+ * @GST_VIDEO_INTERLACE_MODE_PROGRESSIVE: all frames are progressive
+ * @GST_VIDEO_INTERLACE_MODE_INTERLEAVED: 2 fields are interleaved in one video
+ * frame. Extra buffer flags describe the field order.
+ * @GST_VIDEO_INTERLACE_MODE_MIXED: frames contains both interlaced and
+ * progressive video, the buffer flags describe the frame and fields.
+ * @GST_VIDEO_INTERLACE_MODE_FIELDS: 2 fields are stored in one buffer, use the
+ * frame ID to get access to the required field. For multiview (the
+ * 'views' property > 1) the fields of view N can be found at frame ID
+ * (N * 2) and (N * 2) + 1.
+ * Each field has only half the amount of lines as noted in the
+ * height property. This mode requires multiple GstVideoMeta metadata
+ * to describe the fields.
+ * @GST_VIDEO_INTERLACE_MODE_ALTERNATE: 1 field is stored in one buffer,
+ *     @GST_VIDEO_BUFFER_FLAG_TOP_FIELD or @GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD indicates if
+ *     the buffer is carrying the top or bottom field, respectively. The top and
+ * bottom buffers must alternate in the pipeline, with this mode
+ * (Since: 1.16).
+ *
+ * The possible values of the #GstVideoInterlaceMode describing the interlace
+ * mode of the stream.
+ */
+typedef enum {
+ GST_VIDEO_INTERLACE_MODE_PROGRESSIVE = 0,
+ GST_VIDEO_INTERLACE_MODE_INTERLEAVED,
+ GST_VIDEO_INTERLACE_MODE_MIXED,
+ GST_VIDEO_INTERLACE_MODE_FIELDS,
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE,
+} GstVideoInterlaceMode;
+
+GST_VIDEO_API
+const gchar * gst_video_interlace_mode_to_string (GstVideoInterlaceMode mode);
+
+GST_VIDEO_API
+GstVideoInterlaceMode gst_video_interlace_mode_from_string (const gchar * mode);
+
+/**
+ * GstVideoMultiviewMode:
+ * @GST_VIDEO_MULTIVIEW_MODE_NONE: A special value indicating
+ * no multiview information. Used in GstVideoInfo and other places to
+ * indicate that no specific multiview handling has been requested or
+ * provided. This value is never carried on caps.
+ * @GST_VIDEO_MULTIVIEW_MODE_MONO: All frames are monoscopic.
+ * @GST_VIDEO_MULTIVIEW_MODE_LEFT: All frames represent a left-eye view.
+ * @GST_VIDEO_MULTIVIEW_MODE_RIGHT: All frames represent a right-eye view.
+ * @GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE: Left and right eye views are
+ * provided in the left and right half of the frame respectively.
+ * @GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX: Left and right eye
+ * views are provided in the left and right half of the frame, but
+ * have been sampled using quincunx method, with half-pixel offset
+ * between the 2 views.
+ * @GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED: Alternating vertical
+ * columns of pixels represent the left and right eye view respectively.
+ * @GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED: Alternating horizontal
+ * rows of pixels represent the left and right eye view respectively.
+ * @GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM: The top half of the frame
+ * contains the left eye, and the bottom half the right eye.
+ * @GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD: Pixels are arranged with
+ * alternating pixels representing left and right eye views in a
+ * checkerboard fashion.
+ * @GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME: Left and right eye views
+ * are provided in separate frames alternately.
+ * @GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME: Multiple
+ * independent views are provided in separate frames in sequence.
+ * This method only applies to raw video buffers at the moment.
+ * Specific view identification is via the `GstVideoMultiviewMeta`
+ * and #GstVideoMeta(s) on raw video buffers.
+ * @GST_VIDEO_MULTIVIEW_MODE_SEPARATED: Multiple views are
+ * provided as separate #GstMemory framebuffers attached to each
+ * #GstBuffer, described by the `GstVideoMultiviewMeta`
+ * and #GstVideoMeta(s)
+ *
+ * All possible stereoscopic 3D and multiview representations.
+ * In conjunction with #GstVideoMultiviewFlags, describes how
+ * multiview content is being transported in the stream.
+ */
+typedef enum {
+ GST_VIDEO_MULTIVIEW_MODE_NONE = -1,
+ GST_VIDEO_MULTIVIEW_MODE_MONO = 0,
+ /* Single view modes */
+ GST_VIDEO_MULTIVIEW_MODE_LEFT,
+ GST_VIDEO_MULTIVIEW_MODE_RIGHT,
+ /* Stereo view modes */
+ GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE,
+ GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX,
+ GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED,
+ GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED,
+ GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM,
+ GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD,
+ /* Padding for new frame packing modes */
+
+ GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME = 32,
+ /* Multivew mode(s) */
+ GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME,
+ GST_VIDEO_MULTIVIEW_MODE_SEPARATED
+ /* future expansion for annotated modes */
+} GstVideoMultiviewMode;
+
+/**
+ * GstVideoMultiviewFramePacking:
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE: A special value indicating
+ * no frame packing info.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_MONO: All frames are monoscopic.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_LEFT: All frames represent a left-eye view.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_RIGHT: All frames represent a right-eye view.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE: Left and right eye views are
+ * provided in the left and right half of the frame respectively.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE_QUINCUNX: Left and right eye
+ * views are provided in the left and right half of the frame, but
+ * have been sampled using quincunx method, with half-pixel offset
+ * between the 2 views.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_COLUMN_INTERLEAVED: Alternating vertical
+ * columns of pixels represent the left and right eye view respectively.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_ROW_INTERLEAVED: Alternating horizontal
+ * rows of pixels represent the left and right eye view respectively.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_TOP_BOTTOM: The top half of the frame
+ * contains the left eye, and the bottom half the right eye.
+ * @GST_VIDEO_MULTIVIEW_FRAME_PACKING_CHECKERBOARD: Pixels are arranged with
+ * alternating pixels representing left and right eye views in a
+ * checkerboard fashion.
+ *
+ * #GstVideoMultiviewFramePacking represents the subset of #GstVideoMultiviewMode
+ * values that can be applied to any video frame without needing extra metadata.
+ * It can be used by elements that provide a property to override the
+ * multiview interpretation of a video stream when the video doesn't contain
+ * any markers.
+ *
+ * This enum is used (for example) on playbin, to re-interpret a played
+ * video stream as a stereoscopic video. The individual enum values are
+ * equivalent to and have the same value as the matching #GstVideoMultiviewMode.
+ *
+ */
+typedef enum {
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE = GST_VIDEO_MULTIVIEW_MODE_NONE,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_MONO = GST_VIDEO_MULTIVIEW_MODE_MONO,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_LEFT = GST_VIDEO_MULTIVIEW_MODE_LEFT,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_RIGHT = GST_VIDEO_MULTIVIEW_MODE_RIGHT,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE_QUINCUNX = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_COLUMN_INTERLEAVED = GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_ROW_INTERLEAVED = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_TOP_BOTTOM = GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM,
+ GST_VIDEO_MULTIVIEW_FRAME_PACKING_CHECKERBOARD = GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD
+} GstVideoMultiviewFramePacking;
+
+#define GST_VIDEO_MULTIVIEW_MAX_FRAME_PACKING GST_VIDEO_MULTIVIEW_FRAME_PACKING_CHECKERBOARD
+
+/**
+ * GstVideoMultiviewFlags:
+ * @GST_VIDEO_MULTIVIEW_FLAGS_NONE: No flags
+ * @GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST: For stereo streams, the
+ * normal arrangement of left and right views is reversed.
+ * @GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED: The left view is vertically
+ * mirrored.
+ * @GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED: The left view is horizontally
+ * mirrored.
+ * @GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED: The right view is
+ * vertically mirrored.
+ * @GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED: The right view is
+ * horizontally mirrored.
+ * @GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT: For frame-packed
+ * multiview modes, indicates that the individual
+ * views have been encoded with half the true width or height
+ * and should be scaled back up for display. This flag
+ * is used for overriding input layout interpretation
+ * by adjusting pixel-aspect-ratio.
+ * For side-by-side, column interleaved or checkerboard packings, the
+ * pixel width will be doubled. For row interleaved and top-bottom
+ * encodings, pixel height will be doubled.
+ * @GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO: The video stream contains both
+ * mono and multiview portions, signalled on each buffer by the
+ * absence or presence of the @GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW
+ * buffer flag.
+ *
+ * GstVideoMultiviewFlags are used to indicate extra properties of a
+ * stereo/multiview stream beyond the frame layout and buffer mapping
+ * that is conveyed in the #GstVideoMultiviewMode.
+ */
+typedef enum {
+ GST_VIDEO_MULTIVIEW_FLAGS_NONE = 0,
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST = (1 << 0),
+ GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED = (1 << 1),
+ GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED = (1 << 2),
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED = (1 << 3),
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED = (1 << 4),
+ GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT = (1 << 14),
+ GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO = (1 << 15)
+} GstVideoMultiviewFlags;
+
+/**
+ * GstVideoFlags:
+ * @GST_VIDEO_FLAG_NONE: no flags
+ * @GST_VIDEO_FLAG_VARIABLE_FPS: a variable fps is selected, fps_n and fps_d
+ * denote the maximum fps of the video
+ * @GST_VIDEO_FLAG_PREMULTIPLIED_ALPHA: Each color has been scaled by the alpha
+ * value.
+ *
+ * Extra video flags
+ */
+typedef enum {
+ GST_VIDEO_FLAG_NONE = 0,
+ GST_VIDEO_FLAG_VARIABLE_FPS = (1 << 0),
+ GST_VIDEO_FLAG_PREMULTIPLIED_ALPHA = (1 << 1)
+} GstVideoFlags;
+
+/**
+ * GstVideoFieldOrder:
+ * @GST_VIDEO_FIELD_ORDER_UNKNOWN: unknown field order for interlaced content.
+ * The actual field order is signalled via buffer flags.
+ * @GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: top field is first
+ * @GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST: bottom field is first
+ *
+ * Field order of interlaced content. This is only valid for
+ * interlace-mode=interleaved and not interlace-mode=mixed. In the case of
+ * mixed or GST_VIDEO_FIELD_ORDER_UNKNOWN, the field order is signalled via
+ * buffer flags.
+ *
+ * Since: 1.12
+ */
+typedef enum {
+ GST_VIDEO_FIELD_ORDER_UNKNOWN = 0,
+ GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST = 1,
+ GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST = 2,
+} GstVideoFieldOrder;
+
+GST_VIDEO_API
+const gchar * gst_video_field_order_to_string (GstVideoFieldOrder order);
+
+GST_VIDEO_API
+GstVideoFieldOrder gst_video_field_order_from_string (const gchar * order);
+
+/**
+ * GstVideoInfo:
+ * @finfo: the format info of the video
+ * @interlace_mode: the interlace mode
+ * @flags: additional video flags
+ * @width: the width of the video
+ * @height: the height of the video
+ * @views: the number of views for multiview video
+ * @size: the default size of one frame
+ * @chroma_site: a #GstVideoChromaSite.
+ * @colorimetry: the colorimetry info
+ * @par_n: the pixel-aspect-ratio numerator
+ * @par_d: the pixel-aspect-ratio denominator
+ * @fps_n: the framerate numerator
+ * @fps_d: the framerate denominator
+ * @offset: offsets of the planes
+ * @stride: strides of the planes
+ * @multiview_mode: delivery mode for multiple views. (Since: 1.6)
+ * @multiview_flags: flags for multiple views configuration (Since: 1.6)
+ *
+ * Information describing image properties. This information can be filled
+ * in from GstCaps with gst_video_info_from_caps(). The information is also used
+ * to store the specific video info when mapping a video frame with
+ * gst_video_frame_map().
+ *
+ * Use the provided macros to access the info in this structure.
+ */
+struct _GstVideoInfo {
+ const GstVideoFormatInfo *finfo;
+
+ GstVideoInterlaceMode interlace_mode;
+ GstVideoFlags flags;
+ gint width;
+ gint height;
+ gsize size;
+ gint views;
+
+ GstVideoChromaSite chroma_site;
+ GstVideoColorimetry colorimetry;
+
+ gint par_n;
+ gint par_d;
+ gint fps_n;
+ gint fps_d;
+
+ gsize offset[GST_VIDEO_MAX_PLANES];
+ gint stride[GST_VIDEO_MAX_PLANES];
+
+ /* Union preserves padded struct size for backwards compat
+ * Consumer code should use the accessor macros for fields */
+ union {
+ struct { /* < skip > */
+ GstVideoMultiviewMode multiview_mode;
+ GstVideoMultiviewFlags multiview_flags;
+ GstVideoFieldOrder field_order;
+ } abi;
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+ } ABI;
+};
+
+#define GST_TYPE_VIDEO_INFO (gst_video_info_get_type ())
+GST_VIDEO_API
+GType gst_video_info_get_type (void);
+
+/* general info */
+#define GST_VIDEO_INFO_FORMAT(i) (GST_VIDEO_FORMAT_INFO_FORMAT((i)->finfo))
+#define GST_VIDEO_INFO_NAME(i) (GST_VIDEO_FORMAT_INFO_NAME((i)->finfo))
+#define GST_VIDEO_INFO_IS_YUV(i) (GST_VIDEO_FORMAT_INFO_IS_YUV((i)->finfo))
+#define GST_VIDEO_INFO_IS_RGB(i) (GST_VIDEO_FORMAT_INFO_IS_RGB((i)->finfo))
+#define GST_VIDEO_INFO_IS_GRAY(i) (GST_VIDEO_FORMAT_INFO_IS_GRAY((i)->finfo))
+#define GST_VIDEO_INFO_HAS_ALPHA(i) (GST_VIDEO_FORMAT_INFO_HAS_ALPHA((i)->finfo))
+
+#define GST_VIDEO_INFO_INTERLACE_MODE(i) ((i)->interlace_mode)
+#define GST_VIDEO_INFO_IS_INTERLACED(i) ((i)->interlace_mode != GST_VIDEO_INTERLACE_MODE_PROGRESSIVE)
+#define GST_VIDEO_INFO_FIELD_ORDER(i) ((i)->ABI.abi.field_order)
+#define GST_VIDEO_INFO_FLAGS(i) ((i)->flags)
+#define GST_VIDEO_INFO_WIDTH(i) ((i)->width)
+#define GST_VIDEO_INFO_HEIGHT(i) ((i)->height)
+/**
+ * GST_VIDEO_INFO_FIELD_HEIGHT:
+ *
+ * The height of a field. It's the height of the full frame unless split-field
+ * (alternate) interlacing is in use.
+ *
+ * Since: 1.16
+ */
+#define GST_VIDEO_INFO_FIELD_HEIGHT(i) ((i)->interlace_mode == GST_VIDEO_INTERLACE_MODE_ALTERNATE? GST_ROUND_UP_2 ((i)->height) / 2 : (i)->height)
+#define GST_VIDEO_INFO_SIZE(i) ((i)->size)
+#define GST_VIDEO_INFO_VIEWS(i) ((i)->views)
+#define GST_VIDEO_INFO_PAR_N(i) ((i)->par_n)
+#define GST_VIDEO_INFO_PAR_D(i) ((i)->par_d)
+#define GST_VIDEO_INFO_FPS_N(i) ((i)->fps_n)
+#define GST_VIDEO_INFO_FIELD_RATE_N(i) ((GST_VIDEO_INFO_INTERLACE_MODE ((i)) == \
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE) ? \
+ (i)->fps_n * 2 : (i)->fps_n)
+#define GST_VIDEO_INFO_FPS_D(i) ((i)->fps_d)
+
+#define GST_VIDEO_INFO_COLORIMETRY(i) ((i)->colorimetry)
+#define GST_VIDEO_INFO_CHROMA_SITE(i) ((i)->chroma_site)
+
+#define GST_VIDEO_INFO_MULTIVIEW_MODE(i) ((i)->ABI.abi.multiview_mode)
+#define GST_VIDEO_INFO_MULTIVIEW_FLAGS(i) ((i)->ABI.abi.multiview_flags)
+
+/* dealing with GstVideoInfo flags */
+#define GST_VIDEO_INFO_FLAG_IS_SET(i,flag) ((GST_VIDEO_INFO_FLAGS(i) & (flag)) == (flag))
+#define GST_VIDEO_INFO_FLAG_SET(i,flag) (GST_VIDEO_INFO_FLAGS(i) |= (flag))
+#define GST_VIDEO_INFO_FLAG_UNSET(i,flag) (GST_VIDEO_INFO_FLAGS(i) &= ~(flag))
+
+/* dealing with planes */
+#define GST_VIDEO_INFO_N_PLANES(i) (GST_VIDEO_FORMAT_INFO_N_PLANES((i)->finfo))
+#define GST_VIDEO_INFO_PLANE_OFFSET(i,p) ((i)->offset[p])
+#define GST_VIDEO_INFO_PLANE_STRIDE(i,p) ((i)->stride[p])
+/**
+ * GST_VIDEO_INFO_PLANE_HEIGHT:
+ *
+ * The padded height in pixels of a plane (padded size divided by the plane stride).
+ * In case of GST_VIDEO_INTERLACE_MODE_ALTERNATE info, this macro returns the
+ * plane heights used to hold a single field, not the full frame.
+ *
+ * The size passed as third argument is the size of the pixel data and should
+ * not contain any extra metadata padding.
+ *
+ * It is not valid to use this macro with a TILED format.
+ *
+ * Since: 1.18
+ */
+#define GST_VIDEO_INFO_PLANE_HEIGHT(i,p,sizes) ((i)->stride[p] == 0 ? 0 : sizes[p] / (i)->stride[p])
+
+/* dealing with components */
+#define GST_VIDEO_INFO_N_COMPONENTS(i) GST_VIDEO_FORMAT_INFO_N_COMPONENTS((i)->finfo)
+#define GST_VIDEO_INFO_COMP_DEPTH(i,c) GST_VIDEO_FORMAT_INFO_DEPTH((i)->finfo,(c))
+#define GST_VIDEO_INFO_COMP_DATA(i,d,c) GST_VIDEO_FORMAT_INFO_DATA((i)->finfo,d,(c))
+#define GST_VIDEO_INFO_COMP_OFFSET(i,c) GST_VIDEO_FORMAT_INFO_OFFSET((i)->finfo,(i)->offset,(c))
+#define GST_VIDEO_INFO_COMP_STRIDE(i,c) GST_VIDEO_FORMAT_INFO_STRIDE((i)->finfo,(i)->stride,(c))
+#define GST_VIDEO_INFO_COMP_WIDTH(i,c) GST_VIDEO_FORMAT_INFO_SCALE_WIDTH((i)->finfo,(c),(i)->width)
+#define GST_VIDEO_INFO_COMP_HEIGHT(i,c) GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT((i)->finfo,(c),GST_VIDEO_INFO_FIELD_HEIGHT(i))
+#define GST_VIDEO_INFO_COMP_PLANE(i,c) GST_VIDEO_FORMAT_INFO_PLANE((i)->finfo,(c))
+#define GST_VIDEO_INFO_COMP_PSTRIDE(i,c) GST_VIDEO_FORMAT_INFO_PSTRIDE((i)->finfo,(c))
+#define GST_VIDEO_INFO_COMP_POFFSET(i,c) GST_VIDEO_FORMAT_INFO_POFFSET((i)->finfo,(c))
+
+GST_VIDEO_API
+GstVideoInfo * gst_video_info_new (void);
+
+GST_VIDEO_API
+void gst_video_info_init (GstVideoInfo *info);
+
+GST_VIDEO_API
+GstVideoInfo * gst_video_info_copy (const GstVideoInfo *info);
+
+GST_VIDEO_API
+void gst_video_info_free (GstVideoInfo *info);
+
+GST_VIDEO_API
+GstVideoInfo * gst_video_info_new_from_caps (const GstCaps * caps);
+
+GST_VIDEO_API
+gboolean gst_video_info_set_format (GstVideoInfo *info, GstVideoFormat format,
+ guint width, guint height);
+
+GST_VIDEO_API
+gboolean gst_video_info_set_interlaced_format
+ (GstVideoInfo *info,
+ GstVideoFormat format,
+ GstVideoInterlaceMode mode,
+ guint width,
+ guint height);
+
+GST_VIDEO_API
+gboolean gst_video_info_from_caps (GstVideoInfo *info, const GstCaps * caps);
+
+GST_VIDEO_API
+GstCaps * gst_video_info_to_caps (const GstVideoInfo *info);
+
+GST_VIDEO_API
+gboolean gst_video_info_convert (const GstVideoInfo *info,
+ GstFormat src_format,
+ gint64 src_value,
+ GstFormat dest_format,
+ gint64 *dest_value);
+
+GST_VIDEO_API
+gboolean gst_video_info_is_equal (const GstVideoInfo *info,
+ const GstVideoInfo *other);
+
+#include <gst/video/video.h>
+
+GST_VIDEO_API
+gboolean gst_video_info_align (GstVideoInfo * info, GstVideoAlignment * align);
+
+GST_VIDEO_API
+gboolean gst_video_info_align_full (GstVideoInfo * info, GstVideoAlignment * align, gsize plane_size[GST_VIDEO_MAX_PLANES]);
+
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoInfo, gst_video_info_free)
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_INFO_H__ */
diff --git a/include/gst/video/video-multiview.h b/include/gst/video/video-multiview.h
new file mode 100644
index 0000000000..275f0ac29e
--- /dev/null
+++ b/include/gst/video/video-multiview.h
@@ -0,0 +1,108 @@
+/* GStreamer
+ * Copyright (C) <2015> Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_MULTIVIEW_H__
+#define __GST_VIDEO_MULTIVIEW_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoMultiviewFlagsSet:
+ *
+ * See #GstVideoMultiviewFlags.
+ */
+#define GST_TYPE_VIDEO_MULTIVIEW_FLAGSET (gst_video_multiview_flagset_get_type())
+GST_VIDEO_API
+GType gst_video_multiview_flagset_get_type (void);
+
+GST_VIDEO_API
+const gchar * gst_video_multiview_mode_to_caps_string (GstVideoMultiviewMode mview_mode);
+
+GST_VIDEO_API
+GstVideoMultiviewMode gst_video_multiview_mode_from_caps_string (const gchar * caps_mview_mode);
+
+GST_VIDEO_API
+const GValue *gst_video_multiview_get_mono_modes(void);
+
+GST_VIDEO_API
+const GValue *gst_video_multiview_get_unpacked_modes(void);
+
+GST_VIDEO_API
+const GValue *gst_video_multiview_get_doubled_height_modes(void);
+
+GST_VIDEO_API
+const GValue *gst_video_multiview_get_doubled_width_modes(void);
+
+GST_VIDEO_API
+const GValue *gst_video_multiview_get_doubled_size_modes(void);
+
+GST_VIDEO_API
+void gst_video_multiview_video_info_change_mode (GstVideoInfo *info,
+ GstVideoMultiviewMode out_mview_mode, GstVideoMultiviewFlags out_mview_flags);
+
+GST_VIDEO_API
+gboolean gst_video_multiview_guess_half_aspect (GstVideoMultiviewMode mv_mode,
+ guint width, guint height, guint par_n, guint par_d);
+
+
+#if 0 /* Place-holder for later MVC support */
+#define GST_VIDEO_MULTIVIEW_META_API_TYPE (gst_video_multiview_meta_api_get_type())
+#define GST_VIDEO_MULTIVIEW_META_INFO (gst_video_multiview_meta_get_info())
+
+typedef struct _GstVideoMultiviewMeta GstVideoMultiviewMeta;
+typedef struct _GstVideoMultiviewViewInfo GstVideoMultiviewViewInfo;
+
+GType gst_video_multiview_meta_api_get_type (void);
+const GstMetaInfo * gst_video_multiview_meta_get_info (void);
+
+GstVideoMultiviewMeta * gst_buffer_add_video_multiview_meta (GstBuffer *buffer, guint n_views);
+#define gst_buffer_get_video_multiview_meta(b) ((GstVideoMultiviewMeta *)gst_buffer_get_meta((b),GST_VIDEO_MULTIVIEW_META_API_TYPE))
+
+void gst_video_multiview_meta_set_n_views (GstVideoMultiviewMeta *mview_meta, guint n_views);
+
+typedef enum {
+ GST_VIDEO_MULTIVIEW_VIEW_UNKNOWN = 0,
+ GST_VIDEO_MULTIVIEW_VIEW_MONO = 1,
+ GST_VIDEO_MULTIVIEW_VIEW_LEFT = 2,
+ GST_VIDEO_MULTIVIEW_VIEW_RIGHT = 3
+} GstVideoMultiviewViewLabel;
+
+struct _GstVideoMultiviewViewInfo {
+ GstVideoMultiviewViewLabel view_label;
+
+ guint meta_id; /* id of the GstVideoMeta for this view */
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+struct _GstVideoMultiviewMeta {
+ GstMeta meta;
+
+ guint n_views;
+ GstVideoMultiviewViewInfo *view_info;
+};
+#endif
+
+G_END_DECLS
+
+#endif
diff --git a/include/gst/video/video-overlay-composition.h b/include/gst/video/video-overlay-composition.h
new file mode 100644
index 0000000000..7981e024d6
--- /dev/null
+++ b/include/gst/video/video-overlay-composition.h
@@ -0,0 +1,310 @@
+/* GStreamer Video Overlay Composition
+ * Copyright (C) 2011 Intel Corporation
+ * Copyright (C) 2011 Collabora Ltd.
+ * Copyright (C) 2011 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_OVERLAY_COMPOSITION_H__
+#define __GST_VIDEO_OVERLAY_COMPOSITION_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoOverlayRectangle:
+ *
+ * An opaque video overlay rectangle object. A rectangle contains a single
+ * overlay rectangle which can be added to a composition.
+ */
+#define GST_TYPE_VIDEO_OVERLAY_RECTANGLE \
+ (gst_video_overlay_rectangle_get_type ())
+#define GST_VIDEO_OVERLAY_RECTANGLE_CAST(obj) \
+ ((GstVideoOverlayRectangle *)(obj))
+#define GST_VIDEO_OVERLAY_RECTANGLE(obj) \
+ (GST_VIDEO_OVERLAY_RECTANGLE_CAST(obj))
+#define GST_IS_VIDEO_OVERLAY_RECTANGLE(obj) \
+ (GST_IS_MINI_OBJECT_TYPE(obj, GST_TYPE_VIDEO_OVERLAY_RECTANGLE))
+
+typedef struct _GstVideoOverlayRectangle GstVideoOverlayRectangle;
+
+/**
+ * gst_video_overlay_rectangle_ref:
+ * @comp: a #GstVideoOverlayRectangle.
+ *
+ * Increases the refcount of the given rectangle by one.
+ *
+ * Note that the refcount affects the writability
+ * of @comp, use gst_video_overlay_rectangle_copy() to ensure a rectangle can
+ * be modified (there is no gst_video_overlay_rectangle_make_writable() because
+ * it is unlikely that someone will hold the single reference to the rectangle
+ * and not know that that's the case).
+ *
+ * Returns: (transfer full): @comp
+ */
+static inline GstVideoOverlayRectangle *
+gst_video_overlay_rectangle_ref (GstVideoOverlayRectangle * comp)
+{
+  return (GstVideoOverlayRectangle *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (comp));
+}
+
+/**
+ * gst_video_overlay_rectangle_unref:
+ * @comp: (transfer full): a #GstVideoOverlayRectangle.
+ *
+ * Decreases the refcount of the rectangle. If the refcount reaches 0, the
+ * rectangle will be freed.
+ */
+static inline void
+gst_video_overlay_rectangle_unref (GstVideoOverlayRectangle * comp)
+{
+ gst_mini_object_unref (GST_MINI_OBJECT_CAST (comp));
+}
+
+/**
+ * GstVideoOverlayFormatFlags:
+ * @GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE: no flags
+ * @GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA: RGB are premultiplied by A/255.
+ * @GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA: a global-alpha value != 1 is set.
+ *
+ * Overlay format flags.
+ */
+typedef enum {
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE = 0,
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA = (1<<0),
+ GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA = (1<<1)
+} GstVideoOverlayFormatFlags;
+
+#define GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION "meta:GstVideoOverlayComposition"
+
+/**
+ * GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB:
+ *
+ * Supported RGB overlay video format.
+ */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB GST_VIDEO_FORMAT_BGRA
+#else
+#define GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB GST_VIDEO_FORMAT_ARGB
+#endif
+
+/**
+ * GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_YUV:
+ *
+ * Supported YUV overlay video format.
+ */
+#define GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_YUV GST_VIDEO_FORMAT_AYUV
+
+/**
+ * GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS:
+ *
+ * Video formats supported by gst_video_overlay_composition_blend(), for
+ * use in overlay elements' pad template caps.
+ *
+ * Since: 1.2
+ */
+#define GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS GST_VIDEO_FORMATS_ALL
+
+GST_VIDEO_API
+GType gst_video_overlay_rectangle_get_type (void);
+
+GST_VIDEO_API
+GstVideoOverlayRectangle * gst_video_overlay_rectangle_new_raw (GstBuffer * pixels,
+ gint render_x, gint render_y,
+ guint render_width, guint render_height,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstVideoOverlayRectangle * gst_video_overlay_rectangle_copy (GstVideoOverlayRectangle * rectangle);
+
+GST_VIDEO_API
+guint gst_video_overlay_rectangle_get_seqnum (GstVideoOverlayRectangle * rectangle);
+
+GST_VIDEO_API
+void gst_video_overlay_rectangle_set_render_rectangle (GstVideoOverlayRectangle * rectangle,
+ gint render_x,
+ gint render_y,
+ guint render_width,
+ guint render_height);
+
+GST_VIDEO_API
+gboolean gst_video_overlay_rectangle_get_render_rectangle (GstVideoOverlayRectangle * rectangle,
+ gint * render_x,
+ gint * render_y,
+ guint * render_width,
+ guint * render_height);
+
+GST_VIDEO_API
+GstBuffer * gst_video_overlay_rectangle_get_pixels_raw (GstVideoOverlayRectangle * rectangle,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstBuffer * gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle * rectangle,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstBuffer * gst_video_overlay_rectangle_get_pixels_ayuv (GstVideoOverlayRectangle * rectangle,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstBuffer * gst_video_overlay_rectangle_get_pixels_unscaled_raw (GstVideoOverlayRectangle * rectangle,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstBuffer * gst_video_overlay_rectangle_get_pixels_unscaled_argb (GstVideoOverlayRectangle * rectangle,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstBuffer * gst_video_overlay_rectangle_get_pixels_unscaled_ayuv (GstVideoOverlayRectangle * rectangle,
+ GstVideoOverlayFormatFlags flags);
+
+GST_VIDEO_API
+GstVideoOverlayFormatFlags gst_video_overlay_rectangle_get_flags (GstVideoOverlayRectangle * rectangle);
+
+GST_VIDEO_API
+gfloat gst_video_overlay_rectangle_get_global_alpha (GstVideoOverlayRectangle * rectangle);
+
+GST_VIDEO_API
+void gst_video_overlay_rectangle_set_global_alpha (GstVideoOverlayRectangle * rectangle,
+ gfloat global_alpha);
+
+/**
+ * GstVideoOverlayComposition:
+ *
+ * An opaque video overlay composition object. A composition contains
+ * multiple overlay rectangles.
+ */
+#define GST_TYPE_VIDEO_OVERLAY_COMPOSITION \
+ (gst_video_overlay_composition_get_type ())
+#define GST_VIDEO_OVERLAY_COMPOSITION_CAST(obj) \
+ ((GstVideoOverlayComposition *)(obj))
+#define GST_VIDEO_OVERLAY_COMPOSITION(obj) \
+ (GST_VIDEO_OVERLAY_COMPOSITION_CAST(obj))
+#define GST_IS_VIDEO_OVERLAY_COMPOSITION(obj) \
+ (GST_IS_MINI_OBJECT_TYPE(obj, GST_TYPE_VIDEO_OVERLAY_COMPOSITION))
+
+typedef struct _GstVideoOverlayComposition GstVideoOverlayComposition;
+
+/**
+ * gst_video_overlay_composition_ref:
+ * @comp: a #GstVideoOverlayComposition.
+ *
+ * Increases the refcount of the given composition by one.
+ *
+ * Note that the refcount affects the writeability
+ * of @comp, use gst_video_overlay_composition_make_writable() to ensure
+ * a composition and its rectangles can be modified.
+ *
+ * Returns: (transfer full): @comp
+ */
+static inline GstVideoOverlayComposition *
+gst_video_overlay_composition_ref (GstVideoOverlayComposition * comp)
+{
+  return (GstVideoOverlayComposition *) gst_mini_object_ref (GST_MINI_OBJECT_CAST (comp));
+}
+
+/**
+ * gst_video_overlay_composition_unref:
+ * @comp: (transfer full): a #GstVideoOverlayComposition.
+ *
+ * Decreases the refcount of the composition. If the refcount reaches 0, the
+ * composition will be freed.
+ */
+static inline void
+gst_video_overlay_composition_unref (GstVideoOverlayComposition * comp)
+{
+ gst_mini_object_unref (GST_MINI_OBJECT_CAST (comp));
+}
+
+GST_VIDEO_API
+GType gst_video_overlay_composition_get_type (void);
+
+GST_VIDEO_API
+GstVideoOverlayComposition * gst_video_overlay_composition_copy (GstVideoOverlayComposition * comp);
+
+GST_VIDEO_API
+GstVideoOverlayComposition * gst_video_overlay_composition_make_writable (GstVideoOverlayComposition * comp);
+
+GST_VIDEO_API
+GstVideoOverlayComposition * gst_video_overlay_composition_new (GstVideoOverlayRectangle * rectangle);
+
+GST_VIDEO_API
+void gst_video_overlay_composition_add_rectangle (GstVideoOverlayComposition * comp,
+ GstVideoOverlayRectangle * rectangle);
+
+GST_VIDEO_API
+guint gst_video_overlay_composition_n_rectangles (GstVideoOverlayComposition * comp);
+
+GST_VIDEO_API
+GstVideoOverlayRectangle * gst_video_overlay_composition_get_rectangle (GstVideoOverlayComposition * comp, guint n);
+
+GST_VIDEO_API
+guint gst_video_overlay_composition_get_seqnum (GstVideoOverlayComposition * comp);
+
+/* blend composition onto raw video buffer */
+
+GST_VIDEO_API
+gboolean gst_video_overlay_composition_blend (GstVideoOverlayComposition * comp,
+ GstVideoFrame * video_buf);
+
+/* attach/retrieve composition from buffers */
+
+#define GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE \
+ (gst_video_overlay_composition_meta_api_get_type())
+#define GST_VIDEO_OVERLAY_COMPOSITION_META_INFO \
+ (gst_video_overlay_composition_meta_get_info())
+
+typedef struct _GstVideoOverlayCompositionMeta GstVideoOverlayCompositionMeta;
+
+/**
+ * GstVideoOverlayCompositionMeta:
+ * @meta: parent #GstMeta
+ * @overlay: the attached #GstVideoOverlayComposition
+ *
+ * Extra buffer metadata describing image overlay data.
+ */
+struct _GstVideoOverlayCompositionMeta
+{
+ GstMeta meta;
+
+ GstVideoOverlayComposition *overlay;
+};
+
+GST_VIDEO_API
+GType gst_video_overlay_composition_meta_api_get_type (void);
+
+GST_VIDEO_API
+const GstMetaInfo *gst_video_overlay_composition_meta_get_info (void);
+
+GST_VIDEO_API
+GstVideoOverlayCompositionMeta * gst_buffer_add_video_overlay_composition_meta (GstBuffer * buf,
+ GstVideoOverlayComposition * comp);
+
+#define gst_buffer_get_video_overlay_composition_meta(b) \
+ ((GstVideoOverlayCompositionMeta*)gst_buffer_get_meta((b),GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE))
+#define gst_buffer_remove_video_overlay_composition_meta(b,m) \
+ gst_buffer_remove_meta((b),((GstMeta *) m))
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoOverlayComposition, gst_video_overlay_composition_unref)
+
+G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoOverlayRectangle, gst_video_overlay_rectangle_unref)
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_OVERLAY_COMPOSITION_H__ */
diff --git a/include/gst/video/video-prelude.h b/include/gst/video/video-prelude.h
new file mode 100644
index 0000000000..47a30dfef1
--- /dev/null
+++ b/include/gst/video/video-prelude.h
@@ -0,0 +1,41 @@
+/* GStreamer Video Library
+ * Copyright (C) 2018 GStreamer developers
+ *
+ * video-prelude.h: prelude include header for gst-video library
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_PRELUDE_H__
+#define __GST_VIDEO_PRELUDE_H__
+
+#include <gst/gst.h>
+
+#ifdef BUILDING_GST_VIDEO
+#define GST_VIDEO_API GST_API_EXPORT /* from config.h */
+#else
+#define GST_VIDEO_API GST_API_IMPORT
+#endif
+
+#ifndef GST_DISABLE_DEPRECATED
+#define GST_VIDEO_DEPRECATED GST_VIDEO_API
+#define GST_VIDEO_DEPRECATED_FOR(f) GST_VIDEO_API
+#else
+#define GST_VIDEO_DEPRECATED G_DEPRECATED GST_VIDEO_API
+#define GST_VIDEO_DEPRECATED_FOR(f) G_DEPRECATED_FOR(f) GST_VIDEO_API
+#endif
+
+#endif /* __GST_VIDEO_PRELUDE_H__ */
diff --git a/include/gst/video/video-resampler.h b/include/gst/video/video-resampler.h
new file mode 100644
index 0000000000..ffe9ddac01
--- /dev/null
+++ b/include/gst/video/video-resampler.h
@@ -0,0 +1,178 @@
+/* GStreamer
+ * Copyright (C) <2014> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_RESAMPLER_H__
+#define __GST_VIDEO_RESAMPLER_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoResampler GstVideoResampler;
+
+/**
+ * GstVideoResamplerMethod:
+ * @GST_VIDEO_RESAMPLER_METHOD_NEAREST: Duplicates the samples when
+ * upsampling and drops when downsampling
+ * @GST_VIDEO_RESAMPLER_METHOD_LINEAR: Uses linear interpolation to reconstruct
+ * missing samples and averaging to downsample
+ * @GST_VIDEO_RESAMPLER_METHOD_CUBIC: Uses cubic interpolation
+ * @GST_VIDEO_RESAMPLER_METHOD_SINC: Uses sinc interpolation
+ * @GST_VIDEO_RESAMPLER_METHOD_LANCZOS: Uses lanczos interpolation
+ *
+ * Different subsampling and upsampling methods
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_RESAMPLER_METHOD_NEAREST,
+ GST_VIDEO_RESAMPLER_METHOD_LINEAR,
+ GST_VIDEO_RESAMPLER_METHOD_CUBIC,
+ GST_VIDEO_RESAMPLER_METHOD_SINC,
+ GST_VIDEO_RESAMPLER_METHOD_LANCZOS
+} GstVideoResamplerMethod;
+
+/**
+ * GST_VIDEO_RESAMPLER_OPT_CUBIC_B:
+ *
+ * G_TYPE_DOUBLE, B parameter of the cubic filter. The B
+ * parameter controls the blurriness. Values between 0.0 and
+ * 2.0 are accepted. 1/3 is the default.
+ *
+ * Below are some values of popular filters:
+ * B C
+ * Hermite 0.0 0.0
+ * Spline 1.0 0.0
+ * Catmull-Rom 0.0 1/2
+ * Mitchell 1/3 1/3
+ * Robidoux 0.3782 0.3109
+ * Robidoux
+ * Sharp 0.2620 0.3690
+ * Robidoux
+ * Soft 0.6796 0.1602
+ */
+#define GST_VIDEO_RESAMPLER_OPT_CUBIC_B "GstVideoResampler.cubic-b"
+/**
+ * GST_VIDEO_RESAMPLER_OPT_CUBIC_C:
+ *
+ * G_TYPE_DOUBLE, C parameter of the cubic filter. The C
+ * parameter controls the Keys alpha value. Values between 0.0 and
+ * 2.0 are accepted. 1/3 is the default.
+ *
+ * See #GST_VIDEO_RESAMPLER_OPT_CUBIC_B for some more common values
+ */
+#define GST_VIDEO_RESAMPLER_OPT_CUBIC_C "GstVideoResampler.cubic-c"
+
+/**
+ * GST_VIDEO_RESAMPLER_OPT_ENVELOPE:
+ *
+ * G_TYPE_DOUBLE, specifies the size of filter envelope for
+ * @GST_VIDEO_RESAMPLER_METHOD_LANCZOS. Values are clamped between
+ * 1.0 and 5.0. 2.0 is the default.
+ */
+#define GST_VIDEO_RESAMPLER_OPT_ENVELOPE "GstVideoResampler.envelope"
+
+/**
+ * GST_VIDEO_RESAMPLER_OPT_SHARPNESS:
+ *
+ * G_TYPE_DOUBLE, specifies sharpness of the filter for
+ * @GST_VIDEO_RESAMPLER_METHOD_LANCZOS. Values are clamped between
+ * 0.5 and 1.5. 1.0 is the default.
+ */
+#define GST_VIDEO_RESAMPLER_OPT_SHARPNESS "GstVideoResampler.sharpness"
+
+/**
+ * GST_VIDEO_RESAMPLER_OPT_SHARPEN:
+ *
+ * G_TYPE_DOUBLE, specifies sharpening of the filter for
+ * @GST_VIDEO_RESAMPLER_METHOD_LANCZOS. Values are clamped between
+ * 0.0 and 1.0. 0.0 is the default.
+ */
+#define GST_VIDEO_RESAMPLER_OPT_SHARPEN "GstVideoResampler.sharpen"
+/**
+ * GST_VIDEO_RESAMPLER_OPT_MAX_TAPS:
+ *
+ * G_TYPE_INT, limits the maximum number of taps to use.
+ * 16 is the default.
+ */
+#define GST_VIDEO_RESAMPLER_OPT_MAX_TAPS "GstVideoResampler.max-taps"
+
+/**
+ * GstVideoResamplerFlags:
+ * @GST_VIDEO_RESAMPLER_FLAG_NONE: no flags
+ * @GST_VIDEO_RESAMPLER_FLAG_HALF_TAPS: when no taps are given, half the
+ * number of calculated taps. This can be used when making scalers
+ * for the different fields of an interlaced picture. Since: 1.10
+ *
+ * Different resampler flags.
+ *
+ * Since: 1.6
+ */
+typedef enum {
+ GST_VIDEO_RESAMPLER_FLAG_NONE = (0),
+ GST_VIDEO_RESAMPLER_FLAG_HALF_TAPS = (1 << 0),
+} GstVideoResamplerFlags;
+
+/**
+ * GstVideoResampler:
+ * @in_size: the input size
+ * @out_size: the output size
+ * @max_taps: the maximum number of taps
+ * @n_phases: the number of phases
+ * @offset: array with the source offset for each output element
+ * @phase: array with the phase to use for each output element
+ * @n_taps: array with new number of taps for each phase
+ * @taps: the taps for all phases
+ *
+ * A structure holding resampler information.
+ *
+ * Since: 1.6
+ */
+struct _GstVideoResampler
+{
+ gint in_size;
+ gint out_size;
+ guint max_taps;
+ guint n_phases;
+ guint32 *offset;
+ guint32 *phase;
+ guint32 *n_taps;
+ gdouble *taps;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+
+GST_VIDEO_API
+gboolean gst_video_resampler_init (GstVideoResampler *resampler,
+ GstVideoResamplerMethod method,
+ GstVideoResamplerFlags flags,
+ guint n_phases, guint n_taps,
+ gdouble shift,
+ guint in_size, guint out_size,
+ GstStructure *options);
+
+GST_VIDEO_API
+void gst_video_resampler_clear (GstVideoResampler *resampler);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_RESAMPLER_H__ */
diff --git a/include/gst/video/video-scaler.h b/include/gst/video/video-scaler.h
new file mode 100644
index 0000000000..7dc331666e
--- /dev/null
+++ b/include/gst/video/video-scaler.h
@@ -0,0 +1,101 @@
+/* GStreamer
+ * Copyright (C) <2014> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_SCALER_H__
+#define __GST_VIDEO_SCALER_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video-format.h>
+#include <gst/video/video-color.h>
+#include <gst/video/video-resampler.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GST_VIDEO_SCALER_OPT_DITHER_METHOD:
+ *
+ * #GstVideoDitherMethod, The dither method to use for propagating
+ * quantization errors.
+ */
+#define GST_VIDEO_SCALER_OPT_DITHER_METHOD "GstVideoScaler.dither-method"
+
+/**
+ * GstVideoScalerFlags:
+ * @GST_VIDEO_SCALER_FLAG_NONE: no flags
+ * @GST_VIDEO_SCALER_FLAG_INTERLACED: Set up a scaler for interlaced content
+ *
+ * Different scale flags.
+ */
+typedef enum {
+ GST_VIDEO_SCALER_FLAG_NONE = (0),
+ GST_VIDEO_SCALER_FLAG_INTERLACED = (1 << 0),
+} GstVideoScalerFlags;
+
+typedef struct _GstVideoScaler GstVideoScaler;
+
+GST_VIDEO_API
+GstVideoScaler * gst_video_scaler_new (GstVideoResamplerMethod method,
+ GstVideoScalerFlags flags,
+ guint n_taps,
+ guint in_size, guint out_size,
+ GstStructure * options);
+
+GST_VIDEO_API
+void gst_video_scaler_free (GstVideoScaler *scale);
+
+GST_VIDEO_API
+guint gst_video_scaler_get_max_taps (GstVideoScaler *scale);
+
+GST_VIDEO_API
+const gdouble * gst_video_scaler_get_coeff (GstVideoScaler *scale,
+ guint out_offset,
+ guint *in_offset,
+ guint *n_taps);
+
+GST_VIDEO_API
+void gst_video_scaler_horizontal (GstVideoScaler *scale,
+ GstVideoFormat format,
+ gpointer src, gpointer dest,
+ guint dest_offset, guint width);
+
+GST_VIDEO_API
+void gst_video_scaler_vertical (GstVideoScaler *scale,
+ GstVideoFormat format,
+ gpointer src_lines[], gpointer dest,
+ guint dest_offset, guint width);
+
+GST_VIDEO_API
+GstVideoScaler * gst_video_scaler_combine_packed_YUV (GstVideoScaler * y_scale,
+ GstVideoScaler *uv_scale,
+ GstVideoFormat in_format,
+ GstVideoFormat out_format);
+
+GST_VIDEO_API
+void gst_video_scaler_2d (GstVideoScaler *hscale,
+ GstVideoScaler *vscale,
+ GstVideoFormat format,
+ gpointer src, gint src_stride,
+ gpointer dest, gint dest_stride,
+ guint x, guint y,
+ guint width, guint height);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_SCALER_H__ */
diff --git a/include/gst/video/video-tile.h b/include/gst/video/video-tile.h
new file mode 100644
index 0000000000..8992bb351d
--- /dev/null
+++ b/include/gst/video/video-tile.h
@@ -0,0 +1,140 @@
+/* GStreamer
+ * Copyright (C) <2013> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_TILE_H__
+#define __GST_VIDEO_TILE_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoTileType:
+ * @GST_VIDEO_TILE_TYPE_INDEXED: Tiles are indexed. Use
+ * gst_video_tile_get_index () to retrieve the tile at the requested
+ * coordinates.
+ *
+ * Enum value describing the most common tiling types.
+ */
+typedef enum
+{
+ GST_VIDEO_TILE_TYPE_INDEXED = 0
+} GstVideoTileType;
+
+#define GST_VIDEO_TILE_TYPE_SHIFT (16)
+
+/**
+ * GST_VIDEO_TILE_TYPE_MASK: (value 65535)
+ */
+#define GST_VIDEO_TILE_TYPE_MASK ((1 << GST_VIDEO_TILE_TYPE_SHIFT) - 1)
+
+/**
+ * GST_VIDEO_TILE_MAKE_MODE:
+ * @num: the mode number to create
+ * @type: the tile mode type
+ *
+ * Use this macro to create new tile modes.
+ */
+#define GST_VIDEO_TILE_MAKE_MODE(num, type) \
+ (((num) << GST_VIDEO_TILE_TYPE_SHIFT) | (GST_VIDEO_TILE_TYPE_ ##type))
+
+/**
+ * GST_VIDEO_TILE_MODE_TYPE:
+ * @mode: the tile mode
+ *
+ * Get the tile mode type of @mode
+ */
+#define GST_VIDEO_TILE_MODE_TYPE(mode) ((mode) & GST_VIDEO_TILE_TYPE_MASK)
+
+/**
+ * GST_VIDEO_TILE_MODE_IS_INDEXED:
+ * @mode: a tile mode
+ *
+ * Check if @mode is an indexed tile type
+ */
+#define GST_VIDEO_TILE_MODE_IS_INDEXED(mode) (GST_VIDEO_TILE_MODE_TYPE(mode) == GST_VIDEO_TILE_TYPE_INDEXED)
+
+
+#define GST_VIDEO_TILE_Y_TILES_SHIFT (16)
+
+/**
+ * GST_VIDEO_TILE_X_TILES_MASK: (value 65535)
+ */
+#define GST_VIDEO_TILE_X_TILES_MASK ((1 << GST_VIDEO_TILE_Y_TILES_SHIFT) - 1)
+
+/**
+ * GST_VIDEO_TILE_MAKE_STRIDE:
+ * @x_tiles: number of tiles in X
+ * @y_tiles: number of tiles in Y
+ *
+ * Encode the number of tiles in X and Y into the stride.
+ */
+#define GST_VIDEO_TILE_MAKE_STRIDE(x_tiles, y_tiles) \
+ (((y_tiles) << GST_VIDEO_TILE_Y_TILES_SHIFT) | (x_tiles))
+
+/**
+ * GST_VIDEO_TILE_X_TILES:
+ * @stride: plane stride
+ *
+ * Extract the number of tiles in X from the stride value.
+ */
+#define GST_VIDEO_TILE_X_TILES(stride) ((stride) & GST_VIDEO_TILE_X_TILES_MASK)
+
+/**
+ * GST_VIDEO_TILE_Y_TILES:
+ * @stride: plane stride
+ *
+ * Extract the number of tiles in Y from the stride value.
+ */
+#define GST_VIDEO_TILE_Y_TILES(stride) ((stride) >> GST_VIDEO_TILE_Y_TILES_SHIFT)
+
+/**
+ * GstVideoTileMode:
+ * @GST_VIDEO_TILE_MODE_UNKNOWN: Unknown or unset tile mode
+ * @GST_VIDEO_TILE_MODE_ZFLIPZ_2X2: Every four adjacent blocks - two
+ * horizontally and two vertically are grouped together and are located
+ * in memory in Z or flipped Z order. In case of odd rows, the last row
+ * of blocks is arranged in linear order.
+ * @GST_VIDEO_TILE_MODE_LINEAR: Tiles are in row order. (Since: 1.18)
+ *
+ * Enum value describing the available tiling modes.
+ */
+typedef enum
+{
+ GST_VIDEO_TILE_MODE_UNKNOWN = 0,
+ GST_VIDEO_TILE_MODE_ZFLIPZ_2X2 = GST_VIDEO_TILE_MAKE_MODE (1, INDEXED),
+ /**
+ * GST_VIDEO_TILE_MODE_LINEAR:
+ *
+ * Tiles are in row order.
+ *
+ * Since: 1.18
+ */
+ GST_VIDEO_TILE_MODE_LINEAR = GST_VIDEO_TILE_MAKE_MODE (2, INDEXED),
+} GstVideoTileMode;
+
+GST_VIDEO_API
+guint gst_video_tile_get_index (GstVideoTileMode mode, gint x, gint y,
+ gint x_tiles, gint y_tiles);
+
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_TILE_H__ */
diff --git a/include/gst/video/video.h b/include/gst/video/video.h
new file mode 100644
index 0000000000..a31562a09b
--- /dev/null
+++ b/include/gst/video/video.h
@@ -0,0 +1,200 @@
+/* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_H__
+#define __GST_VIDEO_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video-prelude.h>
+
+typedef struct _GstVideoAlignment GstVideoAlignment;
+
+#include <gst/video/video-format.h>
+#include <gst/video/video-color.h>
+#include <gst/video/video-dither.h>
+#include <gst/video/video-info.h>
+#include <gst/video/video-frame.h>
+#include <gst/video/video-enumtypes.h>
+#include <gst/video/video-converter.h>
+#include <gst/video/video-scaler.h>
+#include <gst/video/video-multiview.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoAlignment:
+ * @padding_left: extra pixels on the left side
+ * @padding_right: extra pixels on the right side
+ * @padding_top: extra pixels on the top
+ * @padding_bottom: extra pixels on the bottom
+ * @stride_align: array with extra alignment requirements for the strides
+ *
+ * Extra alignment parameters for the memory of video buffers. This
+ * structure is usually used to configure the bufferpool if it supports the
+ * #GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT.
+ */
+struct _GstVideoAlignment
+{
+ guint padding_top;
+ guint padding_bottom;
+ guint padding_left;
+ guint padding_right;
+ guint stride_align[GST_VIDEO_MAX_PLANES];
+};
+
+/**
+ * GstVideoOrientationMethod:
+ * @GST_VIDEO_ORIENTATION_IDENTITY: Identity (no rotation)
+ * @GST_VIDEO_ORIENTATION_90R: Rotate clockwise 90 degrees
+ * @GST_VIDEO_ORIENTATION_180: Rotate 180 degrees
+ * @GST_VIDEO_ORIENTATION_90L: Rotate counter-clockwise 90 degrees
+ * @GST_VIDEO_ORIENTATION_HORIZ: Flip horizontally
+ * @GST_VIDEO_ORIENTATION_VERT: Flip vertically
+ * @GST_VIDEO_ORIENTATION_UL_LR: Flip across upper left/lower right diagonal
+ * @GST_VIDEO_ORIENTATION_UR_LL: Flip across upper right/lower left diagonal
+ * @GST_VIDEO_ORIENTATION_AUTO: Select flip method based on image-orientation tag
+ * @GST_VIDEO_ORIENTATION_CUSTOM: Current status depends on plugin internal setup
+ *
+ * The different video orientation methods.
+ *
+ * Since: 1.10
+ */
+typedef enum {
+ GST_VIDEO_ORIENTATION_IDENTITY,
+ GST_VIDEO_ORIENTATION_90R,
+ GST_VIDEO_ORIENTATION_180,
+ GST_VIDEO_ORIENTATION_90L,
+ GST_VIDEO_ORIENTATION_HORIZ,
+ GST_VIDEO_ORIENTATION_VERT,
+ GST_VIDEO_ORIENTATION_UL_LR,
+ GST_VIDEO_ORIENTATION_UR_LL,
+ GST_VIDEO_ORIENTATION_AUTO,
+ GST_VIDEO_ORIENTATION_CUSTOM,
+} GstVideoOrientationMethod;
+
+/**
+ * GST_TYPE_VIDEO_ORIENTATION_METHOD:
+ *
+ * Since: 1.20
+ */
+
+/* metadata macros */
+/**
+ * GST_META_TAG_VIDEO_STR:
+ *
+ * This metadata is relevant for video streams.
+ *
+ * Since: 1.2
+ */
+#define GST_META_TAG_VIDEO_STR "video"
+/**
+ * GST_META_TAG_VIDEO_ORIENTATION_STR:
+ *
+ * This metadata stays relevant as long as video orientation is unchanged.
+ *
+ * Since: 1.2
+ */
+#define GST_META_TAG_VIDEO_ORIENTATION_STR "orientation"
+/**
+ * GST_META_TAG_VIDEO_SIZE_STR:
+ *
+ * This metadata stays relevant as long as video size is unchanged.
+ *
+ * Since: 1.2
+ */
+#define GST_META_TAG_VIDEO_SIZE_STR "size"
+/**
+ * GST_META_TAG_VIDEO_COLORSPACE_STR:
+ *
+ * This metadata stays relevant as long as video colorspace is unchanged.
+ *
+ * Since: 1.2
+ */
+#define GST_META_TAG_VIDEO_COLORSPACE_STR "colorspace"
+
+GST_VIDEO_API
+void gst_video_alignment_reset (GstVideoAlignment *align);
+
+
+/* some helper functions */
+
+GST_VIDEO_API
+gboolean gst_video_calculate_display_ratio (guint * dar_n,
+ guint * dar_d,
+ guint video_width,
+ guint video_height,
+ guint video_par_n,
+ guint video_par_d,
+ guint display_par_n,
+ guint display_par_d);
+
+GST_VIDEO_API
+gboolean gst_video_guess_framerate (GstClockTime duration,
+ gint * dest_n, gint * dest_d);
+
+/* convert/encode video sample from one format to another */
+
+typedef void (*GstVideoConvertSampleCallback) (GstSample * sample, GError *error, gpointer user_data);
+
+GST_VIDEO_API
+void gst_video_convert_sample_async (GstSample * sample,
+ const GstCaps * to_caps,
+ GstClockTime timeout,
+ GstVideoConvertSampleCallback callback,
+ gpointer user_data,
+ GDestroyNotify destroy_notify);
+
+GST_VIDEO_API
+GstSample * gst_video_convert_sample (GstSample * sample,
+ const GstCaps * to_caps,
+ GstClockTime timeout,
+ GError ** error);
+
+
+GST_VIDEO_API
+gboolean gst_video_orientation_from_tag (GstTagList * taglist,
+ GstVideoOrientationMethod * method);
+
+G_END_DECLS
+
+#include <gst/video/colorbalancechannel.h>
+#include <gst/video/colorbalance.h>
+#include <gst/video/gstvideoaffinetransformationmeta.h>
+#include <gst/video/gstvideoaggregator.h>
+#include <gst/video/gstvideocodecalphameta.h>
+#include <gst/video/gstvideodecoder.h>
+#include <gst/video/gstvideoencoder.h>
+#include <gst/video/gstvideofilter.h>
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
+#include <gst/video/gstvideosink.h>
+#include <gst/video/gstvideotimecode.h>
+#include <gst/video/gstvideoutils.h>
+#include <gst/video/navigation.h>
+#include <gst/video/video-anc.h>
+#include <gst/video/video-blend.h>
+#include <gst/video/videodirection.h>
+#include <gst/video/video-event.h>
+#include <gst/video/video-hdr.h>
+#include <gst/video/videoorientation.h>
+#include <gst/video/video-overlay-composition.h>
+#include <gst/video/videooverlay.h>
+
+#endif /* __GST_VIDEO_H__ */
diff --git a/include/gst/video/videodirection.h b/include/gst/video/videodirection.h
new file mode 100644
index 0000000000..127606c888
--- /dev/null
+++ b/include/gst/video/videodirection.h
@@ -0,0 +1,64 @@
+/* GStreamer
+ * Copyright (C) 2016 Igalia <calvaris@igalia.com>
+ *
+ * videodirection.h: video rotation and flipping interface
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_DIRECTION_H__
+#define __GST_VIDEO_DIRECTION_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_VIDEO_DIRECTION \
+ (gst_video_direction_get_type ())
+#define GST_VIDEO_DIRECTION(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_DIRECTION, GstVideoDirection))
+#define GST_IS_VIDEO_DIRECTION(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_DIRECTION))
+#define GST_VIDEO_DIRECTION_GET_INTERFACE(inst) \
+ (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_VIDEO_DIRECTION, GstVideoDirectionInterface))
+/**
+ * GstVideoDirection:
+ *
+ * Opaque #GstVideoDirection data structure.
+ *
+ * Since: 1.10
+ */
+typedef struct _GstVideoDirection GstVideoDirection;
+typedef struct _GstVideoDirectionInterface GstVideoDirectionInterface;
+
+/**
+ * GstVideoDirectionInterface:
+ * @iface: parent interface type.
+ *
+ * #GstVideoDirectionInterface interface.
+ *
+ * Since: 1.10
+ */
+struct _GstVideoDirectionInterface
+{
+ GTypeInterface iface;
+};
+
+GST_VIDEO_API
+GType gst_video_direction_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_VIDEO_DIRECTION_H__ */
diff --git a/include/gst/video/videoorientation.h b/include/gst/video/videoorientation.h
new file mode 100644
index 0000000000..8414be62b9
--- /dev/null
+++ b/include/gst/video/videoorientation.h
@@ -0,0 +1,111 @@
+/* GStreamer
+ * Copyright (C) 2006 Nokia <stefan.kost@nokia.com>
+ *
+ * videoorientation.h: video flipping and centering interface
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_ORIENTATION_H__
+#define __GST_VIDEO_ORIENTATION_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-prelude.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VIDEO_ORIENTATION \
+ (gst_video_orientation_get_type ())
+#define GST_VIDEO_ORIENTATION(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_ORIENTATION, GstVideoOrientation))
+#define GST_IS_VIDEO_ORIENTATION(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_ORIENTATION))
+#define GST_VIDEO_ORIENTATION_GET_INTERFACE(inst) \
+ (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_VIDEO_ORIENTATION, GstVideoOrientationInterface))
+
+/**
+ * GstVideoOrientation:
+ *
+ * Opaque #GstVideoOrientation data structure.
+ */
+typedef struct _GstVideoOrientation GstVideoOrientation;
+typedef struct _GstVideoOrientationInterface GstVideoOrientationInterface;
+
+/**
+ * GstVideoOrientationInterface:
+ * @iface: parent interface type.
+ * @get_hflip: virtual method to get horizontal flipping state
+ * @get_vflip: virtual method to get vertical flipping state
+ * @get_hcenter: virtual method to get horizontal centering state
+ * @get_vcenter: virtual method to get vertical centering state
+ * @set_hflip: virtual method to set horizontal flipping state
+ * @set_vflip: virtual method to set vertical flipping state
+ * @set_hcenter: virtual method to set horizontal centering state
+ * @set_vcenter: virtual method to set vertical centering state
+ *
+ * #GstVideoOrientationInterface interface.
+ */
+struct _GstVideoOrientationInterface {
+ GTypeInterface iface;
+
+ /* FIXME 0.11: fix awkward API? add some kind of get_supported flags thing
+ * and then just return booleans/int from all vfuncs requiring the caller
+ * to check the flags first */
+
+ /* virtual functions */
+ gboolean (* get_hflip) (GstVideoOrientation *video_orientation, gboolean *flip);
+ gboolean (* get_vflip) (GstVideoOrientation *video_orientation, gboolean *flip);
+ gboolean (* get_hcenter) (GstVideoOrientation *video_orientation, gint *center);
+ gboolean (* get_vcenter) (GstVideoOrientation *video_orientation, gint *center);
+
+ gboolean (* set_hflip) (GstVideoOrientation *video_orientation, gboolean flip);
+ gboolean (* set_vflip) (GstVideoOrientation *video_orientation, gboolean flip);
+ gboolean (* set_hcenter) (GstVideoOrientation *video_orientation, gint center);
+ gboolean (* set_vcenter) (GstVideoOrientation *video_orientation, gint center);
+};
+
+GST_VIDEO_API
+GType gst_video_orientation_get_type (void);
+
+/* virtual class function wrappers */
+
+GST_VIDEO_API
+gboolean gst_video_orientation_get_hflip (GstVideoOrientation *video_orientation, gboolean *flip);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_get_vflip (GstVideoOrientation *video_orientation, gboolean *flip);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_get_hcenter (GstVideoOrientation *video_orientation, gint *center);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_get_vcenter (GstVideoOrientation *video_orientation, gint *center);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_set_hflip (GstVideoOrientation *video_orientation, gboolean flip);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_set_vflip (GstVideoOrientation *video_orientation, gboolean flip);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_set_hcenter (GstVideoOrientation *video_orientation, gint center);
+
+GST_VIDEO_API
+gboolean gst_video_orientation_set_vcenter (GstVideoOrientation *video_orientation, gint center);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_ORIENTATION_H__ */
diff --git a/include/gst/video/videooverlay.h b/include/gst/video/videooverlay.h
new file mode 100644
index 0000000000..93530c4b39
--- /dev/null
+++ b/include/gst/video/videooverlay.h
@@ -0,0 +1,119 @@
+/* GStreamer Video Overlay Interface
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2003 Julien Moutte <julien@moutte.net>
+ * Copyright (C) 2011 Tim-Philipp Müller <tim@centricular.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_OVERLAY_H__
+#define __GST_VIDEO_OVERLAY_H__
+
+#include <gst/gst.h>
+#include <gst/video/gstvideosink.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VIDEO_OVERLAY \
+ (gst_video_overlay_get_type ())
+#define GST_VIDEO_OVERLAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_OVERLAY, GstVideoOverlay))
+#define GST_IS_VIDEO_OVERLAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_OVERLAY))
+#define GST_VIDEO_OVERLAY_GET_INTERFACE(inst) \
+ (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_VIDEO_OVERLAY, GstVideoOverlayInterface))
+
+/**
+ * GstVideoOverlay:
+ *
+ * Opaque #GstVideoOverlay interface structure
+ */
+typedef struct _GstVideoOverlay GstVideoOverlay;
+typedef struct _GstVideoOverlayInterface GstVideoOverlayInterface;
+
+/**
+ * GstVideoOverlayInterface:
+ * @iface: parent interface type.
+ * @expose: virtual method to handle expose events
+ * @handle_events: virtual method to handle events
+ * @set_render_rectangle: virtual method to set the render rectangle
+ * @set_window_handle: virtual method to configure the window handle
+ *
+ * #GstVideoOverlay interface
+ */
+struct _GstVideoOverlayInterface {
+ GTypeInterface iface;
+
+ /* virtual functions */
+ void (*expose) (GstVideoOverlay *overlay);
+
+ void (*handle_events) (GstVideoOverlay *overlay, gboolean handle_events);
+
+ void (*set_render_rectangle) (GstVideoOverlay *overlay,
+ gint x, gint y,
+ gint width, gint height);
+
+ void (*set_window_handle) (GstVideoOverlay *overlay, guintptr handle);
+};
+
+GST_VIDEO_API
+GType gst_video_overlay_get_type (void);
+
+/* virtual function wrappers */
+
+GST_VIDEO_API
+gboolean gst_video_overlay_set_render_rectangle (GstVideoOverlay * overlay,
+ gint x,
+ gint y,
+ gint width,
+ gint height);
+
+GST_VIDEO_API
+void gst_video_overlay_expose (GstVideoOverlay * overlay);
+
+GST_VIDEO_API
+void gst_video_overlay_handle_events (GstVideoOverlay * overlay,
+ gboolean handle_events);
+
+GST_VIDEO_API
+void gst_video_overlay_set_window_handle (GstVideoOverlay * overlay,
+ guintptr handle);
+
+/* public methods to dispatch bus messages */
+
+GST_VIDEO_API
+void gst_video_overlay_got_window_handle (GstVideoOverlay * overlay,
+ guintptr handle);
+
+GST_VIDEO_API
+void gst_video_overlay_prepare_window_handle (GstVideoOverlay * overlay);
+
+GST_VIDEO_API
+gboolean gst_is_video_overlay_prepare_window_handle_message (GstMessage * msg);
+
+GST_VIDEO_API
+void gst_video_overlay_install_properties (GObjectClass * oclass,
+ gint last_prop_id);
+
+GST_VIDEO_API
+gboolean gst_video_overlay_set_property (GObject * object,
+ gint last_prop_id,
+ guint property_id,
+ const GValue * value);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_OVERLAY_H__ */