RFC - concept patch to use GStreamer

Hi folks,

I've been fighting GStreamer for a while now, and I thought
I'd take a break from my battle to share this concept patch.

I'm hoping to get feedback on the basic direction and approach, so
I can change course if I'm way off base.

The concept is to enable the use of GStreamer, so that we get
any video codec supported by GStreamer 'for free'.
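
To make that concrete, here is a minimal sketch of the encoding path the
patch builds (illustration only; it mirrors construct_pipeline() in the
patch below, with error handling omitted).  Getting another codec 'for
free' would essentially mean swapping the encoder element in the middle:

    /* Sketch only: raw RGB frames go into appsrc, encoded frames come
     * out of appsink.  The encoder element decides the codec;
     * vp8enc / x264enc are examples, not part of this patch. */
    #include <gst/gst.h>

    static GstElement *sketch_build_pipeline(void)
    {
        GstElement *pipeline, *src, *csp, *enc, *sink;

        gst_init(NULL, NULL);

        src  = gst_element_factory_make("appsrc", NULL);
        csp  = gst_element_factory_make("ffmpegcolorspace", NULL);
        /* swap "ffenc_mjpeg" for e.g. "vp8enc" to get another codec */
        enc  = gst_element_factory_make("ffenc_mjpeg", NULL);
        sink = gst_element_factory_make("appsink", NULL);
        pipeline = gst_pipeline_new("pipeline");

        gst_bin_add_many(GST_BIN(pipeline), src, csp, enc, sink, NULL);
        gst_element_link_many(src, csp, enc, sink, NULL);
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        return pipeline;
    }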

This patch is flawed in a variety of ways:
  1.  It uses GStreamer 0.10; I know we'll want 1.0 support
      for a final patch (I'm hoping to support both, as I need
      this on RHEL 6).
  2.  I haven't built a configuration mechanism to select codecs yet;
      that should be trivial.
  3.  It doesn't flow any rate control semantics through to the
      GStreamer path, so the pipeline will eventually lock up.
      That's the fight I'm tired of at the moment <grin>; a possible
      direction is sketched right after this list.
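
For item 3, one possible direction (a sketch only, not part of this
patch) would be to let appsrc's "enough-data" / "need-data" signals
drive frame dropping.  The 'congested' flag below is a hypothetical
new field in GstreamerEncoder, and the sketch assumes the includes
already present in gstreamer_encoder.c:

    /* Connect these in construct_pipeline(); push_frame() would then
     * return -1 (mapped to VIDEO_ENCODER_FRAME_DROP) while congested. */
    static void on_enough_data(GstAppSrc *src, gpointer user_data)
    {
        GstreamerEncoder *enc = user_data;
        enc->congested = TRUE;   /* appsrc queue is full: start dropping */
    }

    static void on_need_data(GstAppSrc *src, guint length, gpointer user_data)
    {
        GstreamerEncoder *enc = user_data;
        enc->congested = FALSE;  /* queue drained: resume pushing frames */
    }

    /* in construct_pipeline(): */
    g_signal_connect(enc->appsource, "enough-data",
                     G_CALLBACK(on_enough_data), enc);
    g_signal_connect(enc->appsource, "need-data",
                     G_CALLBACK(on_need_data), enc);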

It does, I think, fully preserve the 'old' pathway.  At least in my
testing, it seems to function identically.
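
Concretely, the old mjpeg encoder is still there behind the new
video_encoder wrapper.  This patch hard-codes VIDEO_ENCODER_TYPE_GSTREAMER
in red_display_create_stream(); asking for the existing mjpeg behaviour
would just be (illustration, mirroring that function):

    /* The codec selection mechanism from item 2 would eventually make
     * this a real choice instead of a hard-coded type. */
    agent->video_encoder = video_encoder_new(VIDEO_ENCODER_TYPE_JPEG,
                                             TRUE, initial_bit_rate,
                                             &video_cbs, agent);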

Feedback greatly appreciated.

Cheers,

Jeremy

Signed-off-by: Jeremy White <jwhite@xxxxxxxxxxxxxxx>
---
 configure.ac               |   18 +++
 server/Makefile.am         |   11 ++
 server/gstreamer_encoder.c |  339 ++++++++++++++++++++++++++++++++++++++++++++
 server/gstreamer_encoder.h |   89 ++++++++++++
 server/mjpeg_encoder.c     |  102 ++++++++++++-
 server/mjpeg_encoder.h     |   42 +-----
 server/red_worker.c        |  155 ++++++--------------
 server/video_encoder.c     |  128 +++++++++++++++++
 server/video_encoder.h     |  116 +++++++++++++++
 9 files changed, 846 insertions(+), 154 deletions(-)
 create mode 100644 server/gstreamer_encoder.c
 create mode 100644 server/gstreamer_encoder.h
 create mode 100644 server/video_encoder.c
 create mode 100644 server/video_encoder.h

diff --git a/configure.ac b/configure.ac
index 4e57ef3..7aad06c 100644
--- a/configure.ac
+++ b/configure.ac
@@ -101,6 +101,15 @@ if test "x$enable_smartcard" = "xyes"; then
    AC_DEFINE([USE_SMARTCARD], [1], [Define if supporting smartcard proxying])
 fi
 
+AC_ARG_ENABLE(gstreamer-0.10,
+[  --enable-gstreamer-0.10   Enable gstreamer 0.10 support],,
+[enable_gstreamer_0_10="yes"])
+AS_IF([test x"$enable_gstreamer_0_10" != "xno"], [enable_gstreamer_0_10="yes"])
+AM_CONDITIONAL(SUPPORT_GSTREAMER_0_10, test "x$enable_gstreamer_0_10" != "xno")
+if test "x$enable_gstreamer_0_10" = "xyes"; then
+   AC_DEFINE([USE_GSTREAMER_0_10], [1], [Define if we are using Gstreamer 0.10])
+fi
+
 AC_ARG_ENABLE(automated_tests,
 [  --enable-automated-tests     Enable automated tests using spicy-screenshot (part of spice--gtk)],,
 [enable_automated_tests="no"])
@@ -137,6 +146,13 @@ if test "x$enable_smartcard" = "xyes"; then
     AS_VAR_APPEND([SPICE_REQUIRES], [" libcacard >= 0.1.2"])
 fi
 
+if test "x$enable_gstreamer_0_10" = "xyes"; then
+    PKG_CHECK_MODULES(GSTREAMER_0_10, [gstreamer-0.10, gstreamer-app-0.10])
+    AC_SUBST(GSTREAMER_0_10_LIBS)
+    AC_SUBST(GSTREAMER_0_10_CFLAGS)
+    AS_VAR_APPEND([SPICE_REQUIRES], [" gstreamer-0.10"])
+fi
+
 
 PKG_CHECK_MODULES([GLIB2], [glib-2.0 >= 2.22])
 AS_VAR_APPEND([SPICE_REQUIRES], [" glib-2.0 >= 2.22"])
@@ -369,6 +385,8 @@ echo "
 
         Smartcard:                ${enable_smartcard}
 
+        Gstreamer-0.10:           ${enable_gstreamer_0_10}
+
         SASL support:             ${enable_sasl}
 
         Automated tests:          ${enable_automated_tests}
diff --git a/server/Makefile.am b/server/Makefile.am
index 89a375c..c89d774 100644
--- a/server/Makefile.am
+++ b/server/Makefile.am
@@ -11,6 +11,7 @@ AM_CPPFLAGS =					\
 	$(SASL_CFLAGS)				\
 	$(SLIRP_CFLAGS)				\
 	$(SMARTCARD_CFLAGS)			\
+	$(GSTREAMER_0_10_CFLAGS)			\
 	$(SSL_CFLAGS)				\
 	$(VISIBILITY_HIDDEN_CFLAGS)		\
 	$(WARN_CFLAGS)				\
@@ -41,6 +42,7 @@ libspice_server_la_LIBADD =						\
 	$(PIXMAN_LIBS)							\
 	$(SASL_LIBS)							\
 	$(SLIRP_LIBS)							\
+	$(GSTREAMER_0_10_LIBS)							\
 	$(SSL_LIBS)							\
 	$(Z_LIBS)							\
 	$(SPICE_NONPKGCONFIG_LIBS)					\
@@ -82,6 +84,8 @@ libspice_server_la_SOURCES =			\
 	main_channel.h				\
 	mjpeg_encoder.c				\
 	mjpeg_encoder.h				\
+	video_encoder.c				\
+	video_encoder.h				\
 	red_bitmap_utils.h			\
 	red_channel.c				\
 	red_channel.h				\
@@ -138,6 +142,13 @@ libspice_server_la_SOURCES +=	\
 	$(NULL)
 endif
 
+if SUPPORT_GSTREAMER_0_10
+libspice_server_la_SOURCES +=	\
+	gstreamer_encoder.c		\
+	gstreamer_encoder.h		\
+	$(NULL)
+endif
+
 EXTRA_DIST =					\
 	glz_encode_match_tmpl.c			\
 	glz_encode_tmpl.c			\
diff --git a/server/gstreamer_encoder.c b/server/gstreamer_encoder.c
new file mode 100644
index 0000000..e5262d5
--- /dev/null
+++ b/server/gstreamer_encoder.c
@@ -0,0 +1,339 @@
+/* -*- Mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+/*
+   Copyright (C) 2015 Jeremy White
+
+   This library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   This library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with this library; if not, see <http://www.gnu.org/licenses/>.
+*/
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/app/gstappsink.h>
+
+#include "red_common.h"
+#include "video_encoder.h"
+#include "gstreamer_encoder.h"
+
+typedef struct {
+    SpiceBitmapFmt  spice_format;
+    int             bpp;
+    int             depth;
+    int             endianness;
+    int             blue_mask;
+    int             green_mask;
+    int             red_mask;
+} SpiceFormatForGstreamer;
+
+struct GstreamerEncoder {
+    GstElement *pipeline;
+    GstElement *appsource;
+    GstElement *appsink;
+    GstElement *yuv;
+    GstElement *ff;
+    GstCaps *rgb_caps;
+    int frame;
+    uint32_t base_time;
+
+    SpiceFormatForGstreamer *format;
+    SpiceBitmapFmt  spice_format;
+    int width;
+    int height;
+
+    VideoEncoderRateControlCbs cbs;
+    void *cbs_opaque;
+};
+
+GstreamerEncoder *gstreamer_encoder_new(uint64_t bit_rate, VideoEncoderRateControlCbs *cbs, void *opaque)
+{
+    GstreamerEncoder *enc;
+
+    gst_init(NULL, NULL);
+
+    enc = spice_new0(GstreamerEncoder, 1);
+    enc->cbs.get_roundtrip_ms = cbs->get_roundtrip_ms;
+    enc->cbs.get_source_fps = cbs->get_source_fps;
+    enc->cbs.update_client_playback_delay = cbs->update_client_playback_delay;
+    enc->cbs_opaque = opaque;
+
+    return enc;
+}
+
+
+static SpiceFormatForGstreamer *map_format(SpiceBitmapFmt format)
+{
+    int i;
+    static SpiceFormatForGstreamer format_map[] =  {
+        { SPICE_BITMAP_FMT_RGBA, 32, 24, 4321, 0xff000000, 0xff0000, 0xff00},
+    };
+
+    /* SPICE_BITMAP_FMT_1BIT_LE, SPICE_BITMAP_FMT_1BIT_BE, SPICE_BITMAP_FMT_4BIT_LE, SPICE_BITMAP_FMT_4BIT_BE,
+       SPICE_BITMAP_FMT_8BIT, SPICE_BITMAP_FMT_16BIT, SPICE_BITMAP_FMT_24BIT, SPICE_BITMAP_FMT_32BIT,
+       SPICE_BITMAP_FMT_RGBA, SPICE_BITMAP_FMT_8BIT_A, */
+
+    for (i = 0; i < sizeof(format_map) / sizeof(format_map[0]); i++)
+        if (format_map[i].spice_format == format)
+            return &format_map[i];
+
+    return NULL;
+}
+
+static gboolean drain_pipeline(GstreamerEncoder *enc)
+{
+    GstBuffer *buffer = NULL;
+    if (gst_app_sink_is_eos(GST_APP_SINK(enc->appsink)))
+        return TRUE;
+
+    do {
+        if (buffer)
+            gst_buffer_unref(buffer);
+        buffer = gst_app_sink_pull_buffer(GST_APP_SINK(enc->appsink));
+    } while(buffer);
+
+    return gst_app_sink_is_eos(GST_APP_SINK(enc->appsink));
+}
+
+static int construct_pipeline(GstreamerEncoder *enc, const SpiceBitmap *bitmap,
+                              int width, int height)
+{
+    GstStateChangeReturn ret;
+
+    if (enc->pipeline) {
+        GstPad *pad = gst_element_get_pad(enc->yuv, "sink");
+        if (! pad) {
+            spice_error("Unable to get appsrc sink pad to flush the pipe");
+            return -1;
+        }
+        gst_pad_send_event(pad, gst_event_new_eos());
+
+        gst_object_unref(pad);
+        if (! drain_pipeline(enc))
+            return -1;
+
+        gst_bin_remove_many(GST_BIN(enc->pipeline), enc->appsource, enc->yuv, enc->ff, enc->appsink, NULL);
+        gst_object_unref(enc->pipeline);
+    }
+
+    if (enc->rgb_caps)
+        gst_caps_unref(enc->rgb_caps);
+
+    enc->rgb_caps = gst_caps_new_simple ("video/x-raw-rgb",
+                "bpp", G_TYPE_INT, enc->format->bpp,
+                "depth", G_TYPE_INT, enc->format->depth,
+                "width", G_TYPE_INT, width,
+                "height", G_TYPE_INT, height,
+                "endianness", G_TYPE_INT, enc->format->endianness,
+                "red_mask", G_TYPE_INT, enc->format->red_mask,
+                "green_mask", G_TYPE_INT, enc->format->green_mask,
+                "blue_mask", G_TYPE_INT, enc->format->blue_mask,
+                "framerate", GST_TYPE_FRACTION, enc->cbs.get_source_fps(enc->cbs_opaque), 1,
+                NULL);
+    if (!enc->rgb_caps) {
+        spice_error("Gstreamer error: unable to create rgb_caps");
+        return -1;
+    }
+
+    enc->appsource = gst_element_factory_make ("appsrc", NULL);
+    if (! enc->appsource) {
+        spice_error("Gstreamer error creating appsrc");
+        return -1;
+    }
+    /* set caps */
+    g_object_set (G_OBJECT (enc->appsource), "caps", enc->rgb_caps, NULL);
+
+    /* indicate we're live */
+    g_object_set (G_OBJECT (enc->appsource), "is-live", 1, NULL);
+
+    /* Build the pipeline */
+    enc->appsink = gst_element_factory_make ("appsink", NULL);
+    enc->yuv = gst_element_factory_make ("ffmpegcolorspace", NULL);
+    enc->ff = gst_element_factory_make ("ffenc_mjpeg", NULL);
+
+    enc->pipeline = gst_pipeline_new ("pipeline");
+
+    if (!enc->pipeline || !enc->appsource || !enc->appsink || !enc->yuv || !enc->ff) {
+        spice_error("Gstreamer error: not all elements could be created.");
+        return -1;
+    }
+
+    gst_bin_add_many (GST_BIN(enc->pipeline), enc->appsource, enc->yuv, enc->ff, enc->appsink, NULL);
+    if (gst_element_link_many(enc->appsource, enc->yuv, enc->ff, enc->appsink, NULL) != TRUE) {
+        spice_error("Gstreamer error: could not link all the pieces.");
+        gst_object_unref (enc->pipeline);
+        return -1;
+    }
+
+    /* Start playing */
+    gst_pipeline_use_clock(GST_PIPELINE(enc->pipeline), NULL);
+    ret = gst_element_set_state (enc->pipeline, GST_STATE_PLAYING);
+    if (ret == GST_STATE_CHANGE_FAILURE) {
+        spice_error("Gstreamer error: Unable to set the pipeline to the playing state.");
+        gst_object_unref (enc->pipeline);
+        return -1;
+    }
+
+    enc->width = width;
+    enc->height = height;
+
+    return 0;
+}
+
+static inline uint8_t *get_image_line(SpiceChunks *chunks, size_t *offset,
+                                          int *chunk_nr, int stride)
+{
+    uint8_t *ret;
+    SpiceChunk *chunk;
+
+    chunk = &chunks->chunk[*chunk_nr];
+
+    if (*offset == chunk->len) {
+        if (*chunk_nr == chunks->num_chunks - 1) {
+            return NULL; /* Last chunk */
+        }
+        *offset = 0;
+        (*chunk_nr)++;
+        chunk = &chunks->chunk[*chunk_nr];
+    }
+
+    if (chunk->len - *offset < stride) {
+        spice_warning("bad chunk alignment");
+        return NULL;
+    }
+    ret = chunk->data + *offset;
+    *offset += stride;
+    return ret;
+}
+
+
+static int push_frame(GstreamerEncoder *enc,
+                    const SpiceBitmap *bitmap,
+                    const SpiceRect *src,
+                    int top_down, uint32_t frame_mm_time)
+{
+    SpiceChunks *chunks;
+    uint32_t image_stride;
+    size_t offset;
+    int i, chunk;
+    uint8_t *p;
+    int fps;
+
+    GstBuffer *buffer;
+    GstFlowReturn ret;
+
+    fps = enc->cbs.get_source_fps(enc->cbs_opaque);
+
+    const unsigned int stream_height = src->bottom - src->top;
+    const unsigned int stream_width = src->right - src->left;
+
+    /* TODO - this may be inefficient - copy from source directly? */
+    buffer = gst_buffer_new_and_alloc (stream_width * stream_height * (enc->format->bpp / 8));
+
+    chunks = bitmap->data;
+    offset = 0;
+    chunk = 0;
+    image_stride = bitmap->stride;
+
+    const int skip_lines = top_down ? src->top : bitmap->y - (src->bottom - 0);
+    for (i = 0; i < skip_lines; i++) {
+        get_image_line(chunks, &offset, &chunk, image_stride);
+    }
+
+    for (i = 0, p = GST_BUFFER_DATA(buffer); i < stream_height; i++) {
+        uint8_t *src_line =
+            (uint8_t *)get_image_line(chunks, &offset, &chunk, image_stride);
+
+        if (!src_line) {
+            return -1;
+        }
+
+        src_line += src->left * (enc->format->bpp / 8);
+
+        memcpy(p, src_line, stream_width * (enc->format->bpp / 8));
+        p += stream_width * (enc->format->bpp / 8);
+    }
+
+    if (enc->base_time == 0)
+        enc->base_time = frame_mm_time;
+
+    GST_BUFFER_TIMESTAMP(buffer) = (frame_mm_time - enc->base_time) * 1000L * 1000L;
+    GST_BUFFER_DURATION(buffer) = GST_SECOND / fps;
+    GST_BUFFER_OFFSET(buffer) = enc->frame++;
+    gst_buffer_set_caps(buffer, enc->rgb_caps);
+
+    ret = gst_app_src_push_buffer(GST_APP_SRC(enc->appsource), buffer);
+    if (ret != GST_FLOW_OK)
+    {
+        spice_debug("Unable to push source buffer");
+        return -1;
+    }
+
+    return 0;
+}
+
+static int pull_frame(GstreamerEncoder *enc,
+                    uint8_t **outbuf, size_t *outbuf_size, int *data_size)
+{
+    GstBuffer *buffer;
+
+    buffer = gst_app_sink_pull_buffer(GST_APP_SINK(enc->appsink));
+
+    if (buffer) {
+        int len = GST_BUFFER_SIZE(buffer);
+        /* TODO - think this through a bit more... */
+        spice_assert(outbuf && *outbuf && outbuf_size && *outbuf_size > len);
+        memcpy(*outbuf, GST_BUFFER_DATA(buffer), len);
+        gst_buffer_unref(buffer);
+        *data_size = len;
+
+        return 0;
+    }
+    return -1;
+}
+
+void gstreamer_encoder_destroy(GstreamerEncoder *enc)
+{
+    gst_object_unref(enc->pipeline);
+    // TODO - contemplate this
+    //gst_deinit();
+    enc->pipeline = NULL;
+    free(enc);
+}
+
+int gstreamer_encoder_encode_frame(GstreamerEncoder *enc,
+                    const SpiceBitmap *bitmap,
+                    const SpiceRect *src,
+                    int width, int height, int top_down, uint32_t frame_mm_time,
+                    uint8_t **outbuf, size_t *outbuf_size, int *data_size)
+{
+    if (width != enc->width || height != enc->height || enc->spice_format != bitmap->format) {
+        enc->format = map_format(bitmap->format);
+        if (! enc->format) {
+            spice_debug("Unable to map format type %d", bitmap->format);
+            return VIDEO_ENCODER_FRAME_UNSUPPORTED;
+        }
+        enc->spice_format = bitmap->format;
+
+        if (construct_pipeline(enc, bitmap, width, height))
+            return VIDEO_ENCODER_FRAME_DROP;
+    }
+
+    if (push_frame(enc, bitmap, src, top_down, frame_mm_time))
+        return VIDEO_ENCODER_FRAME_DROP;
+
+    if (pull_frame(enc, outbuf, outbuf_size, data_size))
+        return VIDEO_ENCODER_FRAME_DROP;
+
+    return VIDEO_ENCODER_FRAME_ENCODE_DONE;
+}
diff --git a/server/gstreamer_encoder.h b/server/gstreamer_encoder.h
new file mode 100644
index 0000000..e2ea2c7
--- /dev/null
+++ b/server/gstreamer_encoder.h
@@ -0,0 +1,89 @@
+/* -*- Mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+/*
+   Copyright (C) 2015 Jeremy White
+
+   This library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   This library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with this library; if not, see <http://www.gnu.org/licenses/>.
+*/
+
+#ifndef _H_GSTREAMER_ENCODER
+#define _H_GSTREAMER_ENCODER
+
+#include "red_common.h"
+
+typedef struct GstreamerEncoder GstreamerEncoder;
+
+typedef struct GstreamerEncoderStats {
+    uint64_t starting_bit_rate;
+    uint64_t cur_bit_rate;
+    double avg_quality;
+} GstreamerEncoderStats;
+
+GstreamerEncoder *gstreamer_encoder_new(uint64_t bit_rate, VideoEncoderRateControlCbs *cbs, void *opaque);
+void gstreamer_encoder_destroy(GstreamerEncoder *encoder);
+
+/*
+ * *outbuf must be either NULL or allocated by malloc, since it might be freed
+ * during the encoding if its size is too small.
+ *
+ * return:
+ *  VIDEO_ENCODER_FRAME_UNSUPPORTED : frame cannot be encoded
+ *  VIDEO_ENCODER_FRAME_DROP        : frame should be dropped. This value can only be returned
+ *                                    if rate control is active.
+ *  VIDEO_ENCODER_FRAME_ENCODE_DONE : frame encoding completed.
+ */
+
+int gstreamer_encoder_encode_frame(GstreamerEncoder *encoder,
+                    const SpiceBitmap *bitmap,
+                    const SpiceRect *src,
+                    int width, int height, int top_down, uint32_t frame_mm_time,
+                    uint8_t **outbuf, size_t *outbuf_size, int *data_size);
+
+/*
+ * bit rate control
+ */
+
+/*
+ * Data that should be periodically obtained from the client. The report contains:
+ * num_frames         : the number of frames that reached the client during the time
+ *                      the report is referring to.
+ * num_drops          : the part of the above frames that was dropped by the client due to
+ *                      late arrival time.
+ * start_frame_mm_time: the mm_time of the first frame included in the report
+ * end_frame_mm_time  : the mm_time of the last_frame included in the report
+ * end_frame_delay    : (end_frame_mm_time - client_mm_time)
+ * audio delay        : the latency of the audio playback.
+ *                      If there is no audio playback, set it to MAX_UINT.
+ *
+ */
+void gstreamer_encoder_client_stream_report(GstreamerEncoder *encoder,
+                                        uint32_t num_frames,
+                                        uint32_t num_drops,
+                                        uint32_t start_frame_mm_time,
+                                        uint32_t end_frame_mm_time,
+                                        int32_t end_frame_delay,
+                                        uint32_t audio_delay);
+
+/*
+ * Notify the encoder each time a frame is dropped due to pipe
+ * congestion.
+ * We can deduce the client state by the frame dropping rate in the server.
+ * Monitoring the frame drops can help in fine tuning the playback parameters
+ * when the client reports are delayed.
+ */
+void gstreamer_encoder_notify_server_frame_drop(GstreamerEncoder *encoder);
+
+uint64_t gstreamer_encoder_get_bit_rate(GstreamerEncoder *encoder);
+void gstreamer_encoder_get_stats(GstreamerEncoder *encoder, GstreamerEncoderStats *stats);
+
+#endif
diff --git a/server/mjpeg_encoder.c b/server/mjpeg_encoder.c
index 12447da..15c9225 100644
--- a/server/mjpeg_encoder.c
+++ b/server/mjpeg_encoder.c
@@ -20,6 +20,7 @@
 #endif
 
 #include "red_common.h"
+#include "video_encoder.h"
 #include "mjpeg_encoder.h"
 #include <jerror.h>
 #include <jpeglib.h>
@@ -166,7 +167,7 @@ struct MJpegEncoder {
 
     int rate_control_is_active;
     MJpegEncoderRateControl rate_control;
-    MJpegEncoderRateControlCbs cbs;
+    VideoEncoderRateControlCbs cbs;
     void *cbs_opaque;
 
     /* stats */
@@ -186,7 +187,7 @@ static uint32_t get_min_required_playback_delay(uint64_t frame_enc_size,
                                                 uint32_t latency);
 
 MJpegEncoder *mjpeg_encoder_new(int bit_rate_control, uint64_t starting_bit_rate,
-                                MJpegEncoderRateControlCbs *cbs, void *opaque)
+                                VideoEncoderRateControlCbs *cbs, void *opaque)
 {
     MJpegEncoder *enc;
 
@@ -753,7 +754,7 @@ int mjpeg_encoder_start_frame(MJpegEncoder *encoder, SpiceBitmapFmt format,
         interval = (now - rate_control->bit_rate_info.last_frame_time);
 
         if (interval < (1000*1000*1000) / rate_control->adjusted_fps) {
-            return MJPEG_ENCODER_FRAME_DROP;
+            return VIDEO_ENCODER_FRAME_DROP;
         }
 
         mjpeg_encoder_adjust_params_to_bit_rate(encoder);
@@ -801,14 +802,14 @@ int mjpeg_encoder_start_frame(MJpegEncoder *encoder, SpiceBitmapFmt format,
         break;
     default:
         spice_debug("unsupported format %d", format);
-        return MJPEG_ENCODER_FRAME_UNSUPPORTED;
+        return VIDEO_ENCODER_FRAME_UNSUPPORTED;
     }
 
     if (encoder->pixel_converter != NULL) {
         unsigned int stride = width * 3;
         /* check for integer overflow */
         if (stride < width) {
-            return MJPEG_ENCODER_FRAME_UNSUPPORTED;
+            return VIDEO_ENCODER_FRAME_UNSUPPORTED;
         }
         if (encoder->row_size < stride) {
             encoder->row = spice_realloc(encoder->row, stride);
@@ -828,7 +829,7 @@ int mjpeg_encoder_start_frame(MJpegEncoder *encoder, SpiceBitmapFmt format,
 
     encoder->num_frames++;
     encoder->avg_quality += quality;
-    return MJPEG_ENCODER_FRAME_ENCODE_START;
+    return VIDEO_ENCODER_FRAME_ENCODE_DONE;
 }
 
 int mjpeg_encoder_encode_scanline(MJpegEncoder *encoder, uint8_t *src_pixels,
@@ -887,6 +888,93 @@ size_t mjpeg_encoder_end_frame(MJpegEncoder *encoder)
     return encoder->rate_control.last_enc_size;
 }
 
+static inline uint8_t *get_image_line(SpiceChunks *chunks, size_t *offset,
+                                          int *chunk_nr, int stride)
+{
+    uint8_t *ret;
+    SpiceChunk *chunk;
+
+    chunk = &chunks->chunk[*chunk_nr];
+
+    if (*offset == chunk->len) {
+        if (*chunk_nr == chunks->num_chunks - 1) {
+            return NULL; /* Last chunk */
+        }
+        *offset = 0;
+        (*chunk_nr)++;
+        chunk = &chunks->chunk[*chunk_nr];
+    }
+
+    if (chunk->len - *offset < stride) {
+        spice_warning("bad chunk alignment");
+        return NULL;
+    }
+    ret = chunk->data + *offset;
+    *offset += stride;
+    return ret;
+}
+
+
+
+static int encode_mjpeg_frame(MJpegEncoder *encoder, const SpiceRect *src,
+                        const SpiceBitmap *image, int top_down)
+{
+    SpiceChunks *chunks;
+    uint32_t image_stride;
+    size_t offset;
+    int i, chunk;
+
+    chunks = image->data;
+    offset = 0;
+    chunk = 0;
+    image_stride = image->stride;
+
+    const int skip_lines = top_down ? src->top : image->y - (src->bottom - 0);
+    for (i = 0; i < skip_lines; i++) {
+            get_image_line(chunks, &offset, &chunk, image_stride);
+    }
+
+    const unsigned int stream_height = src->bottom - src->top;
+    const unsigned int stream_width = src->right - src->left;
+
+    for (i = 0; i < stream_height; i++) {
+        uint8_t *src_line = (uint8_t *)get_image_line(chunks, &offset, &chunk, image_stride);
+
+        if (!src_line) {
+            return FALSE;
+        }
+
+        src_line += src->left * mjpeg_encoder_get_bytes_per_pixel(encoder);
+        if (mjpeg_encoder_encode_scanline(encoder, src_line, stream_width) == 0)
+            return FALSE;
+    }
+
+    return TRUE;
+}
+
+int mjpeg_encoder_encode_frame(MJpegEncoder *encoder,
+                const SpiceBitmap *bitmap,
+                const SpiceRect *src,
+                int width, int height, int top_down, uint32_t frame_mm_time,
+                uint8_t **outbuf, size_t *outbuf_size, int *data_size)
+{
+    int ret;
+
+    ret = mjpeg_encoder_start_frame(encoder, bitmap->format,
+                                    width, height, outbuf, outbuf_size,
+                                    frame_mm_time);
+    if (ret != VIDEO_ENCODER_FRAME_ENCODE_DONE)
+        return ret;
+
+    if (!encode_mjpeg_frame(encoder, src, bitmap, top_down))
+        return VIDEO_ENCODER_FRAME_UNSUPPORTED;
+
+    *data_size = mjpeg_encoder_end_frame(encoder);
+
+    return VIDEO_ENCODER_FRAME_ENCODE_DONE;
+}
+
+
 static void mjpeg_encoder_quality_eval_stop(MJpegEncoder *encoder)
 {
     MJpegEncoderRateControl *rate_control = &encoder->rate_control;
@@ -1266,7 +1354,7 @@ uint64_t mjpeg_encoder_get_bit_rate(MJpegEncoder *encoder)
     return encoder->rate_control.byte_rate * 8;
 }
 
-void mjpeg_encoder_get_stats(MJpegEncoder *encoder, MJpegEncoderStats *stats)
+void mjpeg_encoder_get_stats(MJpegEncoder *encoder, VideoEncoderStats *stats)
 {
     spice_assert(encoder != NULL && stats != NULL);
     stats->starting_bit_rate = encoder->starting_bit_rate;
diff --git a/server/mjpeg_encoder.h b/server/mjpeg_encoder.h
index 741ea1c..4d09eb9 100644
--- a/server/mjpeg_encoder.h
+++ b/server/mjpeg_encoder.h
@@ -21,36 +21,10 @@
 
 #include "red_common.h"
 
-enum {
-    MJPEG_ENCODER_FRAME_UNSUPPORTED = -1,
-    MJPEG_ENCODER_FRAME_DROP,
-    MJPEG_ENCODER_FRAME_ENCODE_START,
-};
-
 typedef struct MJpegEncoder MJpegEncoder;
 
-/*
- * Callbacks required for controling and adjusting
- * the stream bit rate:
- * get_roundtrip_ms: roundtrip time in milliseconds
- * get_source_fps: the input frame rate (#frames per second), i.e.,
- * the rate of frames arriving from the guest to spice-server,
- * before any drops.
- */
-typedef struct MJpegEncoderRateControlCbs {
-    uint32_t (*get_roundtrip_ms)(void *opaque);
-    uint32_t (*get_source_fps)(void *opaque);
-    void (*update_client_playback_delay)(void *opaque, uint32_t delay_ms);
-} MJpegEncoderRateControlCbs;
-
-typedef struct MJpegEncoderStats {
-    uint64_t starting_bit_rate;
-    uint64_t cur_bit_rate;
-    double avg_quality;
-} MJpegEncoderStats;
-
 MJpegEncoder *mjpeg_encoder_new(int bit_rate_control, uint64_t starting_bit_rate,
-                                MJpegEncoderRateControlCbs *cbs, void *opaque);
+                                VideoEncoderRateControlCbs *cbs, void *opaque);
 void mjpeg_encoder_destroy(MJpegEncoder *encoder);
 
 uint8_t mjpeg_encoder_get_bytes_per_pixel(MJpegEncoder *encoder);
@@ -66,13 +40,11 @@ uint8_t mjpeg_encoder_get_bytes_per_pixel(MJpegEncoder *encoder);
  *  MJPEG_ENCODER_FRAME_ENCODE_START: frame encoding started. Continue with
  *                                    mjpeg_encoder_encode_scanline.
  */
-int mjpeg_encoder_start_frame(MJpegEncoder *encoder, SpiceBitmapFmt format,
-                              int width, int height,
-                              uint8_t **dest, size_t *dest_len,
-                              uint32_t frame_mm_time);
-int mjpeg_encoder_encode_scanline(MJpegEncoder *encoder, uint8_t *src_pixels,
-                                  size_t image_width);
-size_t mjpeg_encoder_end_frame(MJpegEncoder *encoder);
+int mjpeg_encoder_encode_frame(MJpegEncoder *encoder,
+                const SpiceBitmap *bitmap,
+                const SpiceRect *src,
+                int width, int height, int top_down, uint32_t frame_mm_time,
+                uint8_t **outbuf, size_t *outbuf_size, int *data_size);
 
 /*
  * bit rate control
@@ -109,6 +81,6 @@ void mjpeg_encoder_client_stream_report(MJpegEncoder *encoder,
 void mjpeg_encoder_notify_server_frame_drop(MJpegEncoder *encoder);
 
 uint64_t mjpeg_encoder_get_bit_rate(MJpegEncoder *encoder);
-void mjpeg_encoder_get_stats(MJpegEncoder *encoder, MJpegEncoderStats *stats);
+void mjpeg_encoder_get_stats(MJpegEncoder *encoder, VideoEncoderStats *stats);
 
 #endif
diff --git a/server/red_worker.c b/server/red_worker.c
index 58cc827..e3a13be 100644
--- a/server/red_worker.c
+++ b/server/red_worker.c
@@ -70,7 +70,7 @@
 #include "glz_encoder.h"
 #include "stat.h"
 #include "reds.h"
-#include "mjpeg_encoder.h"
+#include "video_encoder.h"
 #include "red_memslots.h"
 #include "red_parse_qxl.h"
 #include "jpeg_encoder.h"
@@ -484,7 +484,7 @@ typedef struct StreamAgent {
     PipeItem destroy_item;
     Stream *stream;
     uint64_t last_send_time;
-    MJpegEncoder *mjpeg_encoder;
+    VideoEncoder *video_encoder;
     DisplayChannelClient *dcc;
 
     int frames;
@@ -723,7 +723,7 @@ struct DisplayChannelClient {
     QRegion surface_client_lossy_region[NUM_SURFACES];
 
     StreamAgent stream_agents[NUM_STREAMS];
-    int use_mjpeg_encoder_rate_control;
+    int use_video_encoder_rate_control;
     uint32_t streams_max_latency;
     uint64_t streams_max_bit_rate;
 };
@@ -2642,10 +2642,10 @@ static void red_print_stream_stats(DisplayChannelClient *dcc, StreamAgent *agent
 #ifdef STREAM_STATS
     StreamStats *stats = &agent->stats;
     double passed_mm_time = (stats->end - stats->start) / 1000.0;
-    MJpegEncoderStats encoder_stats = {0};
+    VideoEncoderStats encoder_stats = {0};
 
-    if (agent->mjpeg_encoder) {
-        mjpeg_encoder_get_stats(agent->mjpeg_encoder, &encoder_stats);
+    if (agent->video_encoder) {
+        video_encoder_get_stats(agent->video_encoder, &encoder_stats);
     }
 
     spice_debug("stream=%"PRIdPTR" dim=(%dx%d) #in-frames=%"PRIu64" #in-avg-fps=%.2f #out-frames=%"PRIu64" "
@@ -2688,8 +2688,8 @@ static void red_stop_stream(RedWorker *worker, Stream *stream)
         region_clear(&stream_agent->vis_region);
         region_clear(&stream_agent->clip);
         spice_assert(!pipe_item_is_linked(&stream_agent->destroy_item));
-        if (stream_agent->mjpeg_encoder && dcc->use_mjpeg_encoder_rate_control) {
-            uint64_t stream_bit_rate = mjpeg_encoder_get_bit_rate(stream_agent->mjpeg_encoder);
+        if (stream_agent->video_encoder && dcc->use_video_encoder_rate_control) {
+            uint64_t stream_bit_rate = video_encoder_get_bit_rate(stream_agent->video_encoder);
 
             if (stream_bit_rate > dcc->streams_max_bit_rate) {
                 spice_debug("old max-bit-rate=%.2f new=%.2f",
@@ -2996,7 +2996,7 @@ static uint64_t red_stream_get_initial_bit_rate(DisplayChannelClient *dcc,
            stream->width * stream->height) / dcc->common.worker->streams_size_total;
 }
 
-static uint32_t red_stream_mjpeg_encoder_get_roundtrip(void *opaque)
+static uint32_t red_stream_video_encoder_get_roundtrip(void *opaque)
 {
     StreamAgent *agent = opaque;
     int roundtrip;
@@ -3017,7 +3017,7 @@ static uint32_t red_stream_mjpeg_encoder_get_roundtrip(void *opaque)
     return roundtrip;
 }
 
-static uint32_t red_stream_mjpeg_encoder_get_source_fps(void *opaque)
+static uint32_t red_stream_video_encoder_get_source_fps(void *opaque)
 {
     StreamAgent *agent = opaque;
 
@@ -3040,7 +3040,7 @@ static void red_display_update_streams_max_latency(DisplayChannelClient *dcc, St
     }
     for (i = 0; i < NUM_STREAMS; i++) {
         StreamAgent *other_agent = &dcc->stream_agents[i];
-        if (other_agent == remove_agent || !other_agent->mjpeg_encoder) {
+        if (other_agent == remove_agent || !other_agent->video_encoder) {
             continue;
         }
         if (other_agent->client_required_latency > new_max_latency) {
@@ -3053,9 +3053,9 @@ static void red_display_update_streams_max_latency(DisplayChannelClient *dcc, St
 static void red_display_stream_agent_stop(DisplayChannelClient *dcc, StreamAgent *agent)
 {
     red_display_update_streams_max_latency(dcc, agent);
-    if (agent->mjpeg_encoder) {
-        mjpeg_encoder_destroy(agent->mjpeg_encoder);
-        agent->mjpeg_encoder = NULL;
+    if (agent->video_encoder) {
+        video_encoder_destroy(agent->video_encoder);
+        agent->video_encoder = NULL;
     }
 }
 
@@ -3091,18 +3091,18 @@ static void red_display_create_stream(DisplayChannelClient *dcc, Stream *stream)
     agent->fps = MAX_FPS;
     agent->dcc = dcc;
 
-    if (dcc->use_mjpeg_encoder_rate_control) {
-        MJpegEncoderRateControlCbs mjpeg_cbs;
+    if (dcc->use_video_encoder_rate_control) {
+        VideoEncoderRateControlCbs video_cbs;
         uint64_t initial_bit_rate;
 
-        mjpeg_cbs.get_roundtrip_ms = red_stream_mjpeg_encoder_get_roundtrip;
-        mjpeg_cbs.get_source_fps = red_stream_mjpeg_encoder_get_source_fps;
-        mjpeg_cbs.update_client_playback_delay = red_stream_update_client_playback_latency;
+        video_cbs.get_roundtrip_ms = red_stream_video_encoder_get_roundtrip;
+        video_cbs.get_source_fps = red_stream_video_encoder_get_source_fps;
+        video_cbs.update_client_playback_delay = red_stream_update_client_playback_latency;
 
         initial_bit_rate = red_stream_get_initial_bit_rate(dcc, stream);
-        agent->mjpeg_encoder = mjpeg_encoder_new(TRUE, initial_bit_rate, &mjpeg_cbs, agent);
+        agent->video_encoder = video_encoder_new(VIDEO_ENCODER_TYPE_GSTREAMER, TRUE, initial_bit_rate, &video_cbs, agent);
     } else {
-        agent->mjpeg_encoder = mjpeg_encoder_new(FALSE, 0, NULL, NULL);
+        agent->video_encoder = video_encoder_new(VIDEO_ENCODER_TYPE_GSTREAMER, FALSE, 0, NULL, NULL);
     }
     red_channel_client_pipe_add(&dcc->common.base, &agent->create_item);
 
@@ -3209,7 +3209,7 @@ static void red_display_client_init_streams(DisplayChannelClient *dcc)
         red_channel_pipe_item_init(channel, &agent->create_item, PIPE_ITEM_TYPE_STREAM_CREATE);
         red_channel_pipe_item_init(channel, &agent->destroy_item, PIPE_ITEM_TYPE_STREAM_DESTROY);
     }
-    dcc->use_mjpeg_encoder_rate_control =
+    dcc->use_video_encoder_rate_control =
         red_channel_client_test_remote_cap(&dcc->common.base, SPICE_DISPLAY_CAP_STREAM_REPORT);
 }
 
@@ -3221,9 +3221,9 @@ static void red_display_destroy_streams_agents(DisplayChannelClient *dcc)
         StreamAgent *agent = &dcc->stream_agents[i];
         region_destroy(&agent->vis_region);
         region_destroy(&agent->clip);
-        if (agent->mjpeg_encoder) {
-            mjpeg_encoder_destroy(agent->mjpeg_encoder);
-            agent->mjpeg_encoder = NULL;
+        if (agent->video_encoder) {
+            video_encoder_destroy(agent->video_encoder);
+            agent->video_encoder = NULL;
         }
     }
 }
@@ -3350,7 +3350,7 @@ static inline void pre_stream_item_swap(RedWorker *worker, Stream *stream, Drawa
         dcc = dpi->dcc;
         agent = &dcc->stream_agents[index];
 
-        if (!dcc->use_mjpeg_encoder_rate_control &&
+        if (!dcc->use_video_encoder_rate_control &&
             !dcc->common.is_low_bandwidth) {
             continue;
         }
@@ -3359,8 +3359,8 @@ static inline void pre_stream_item_swap(RedWorker *worker, Stream *stream, Drawa
 #ifdef STREAM_STATS
             agent->stats.num_drops_pipe++;
 #endif
-            if (dcc->use_mjpeg_encoder_rate_control) {
-                mjpeg_encoder_notify_server_frame_drop(agent->mjpeg_encoder);
+            if (dcc->use_video_encoder_rate_control) {
+                video_encoder_notify_server_frame_drop(agent->video_encoder);
             } else {
                 ++agent->drops;
             }
@@ -3373,7 +3373,7 @@ static inline void pre_stream_item_swap(RedWorker *worker, Stream *stream, Drawa
 
         agent = &dcc->stream_agents[index];
 
-        if (dcc->use_mjpeg_encoder_rate_control) {
+        if (dcc->use_video_encoder_rate_control) {
             continue;
         }
         if (agent->frames / agent->fps < FPS_TEST_INTERVAL) {
@@ -8480,70 +8480,6 @@ static inline void display_begin_send_message(RedChannelClient *rcc)
     red_channel_client_begin_send_message(rcc);
 }
 
-static inline uint8_t *red_get_image_line(SpiceChunks *chunks, size_t *offset,
-                                          int *chunk_nr, int stride)
-{
-    uint8_t *ret;
-    SpiceChunk *chunk;
-
-    chunk = &chunks->chunk[*chunk_nr];
-
-    if (*offset == chunk->len) {
-        if (*chunk_nr == chunks->num_chunks - 1) {
-            return NULL; /* Last chunk */
-        }
-        *offset = 0;
-        (*chunk_nr)++;
-        chunk = &chunks->chunk[*chunk_nr];
-    }
-
-    if (chunk->len - *offset < stride) {
-        spice_warning("bad chunk alignment");
-        return NULL;
-    }
-    ret = chunk->data + *offset;
-    *offset += stride;
-    return ret;
-}
-
-static int encode_frame(DisplayChannelClient *dcc, const SpiceRect *src,
-                        const SpiceBitmap *image, Stream *stream)
-{
-    SpiceChunks *chunks;
-    uint32_t image_stride;
-    size_t offset;
-    int i, chunk;
-    StreamAgent *agent = &dcc->stream_agents[stream - dcc->common.worker->streams_buf];
-
-    chunks = image->data;
-    offset = 0;
-    chunk = 0;
-    image_stride = image->stride;
-
-    const int skip_lines = stream->top_down ? src->top : image->y - (src->bottom - 0);
-    for (i = 0; i < skip_lines; i++) {
-        red_get_image_line(chunks, &offset, &chunk, image_stride);
-    }
-
-    const unsigned int stream_height = src->bottom - src->top;
-    const unsigned int stream_width = src->right - src->left;
-
-    for (i = 0; i < stream_height; i++) {
-        uint8_t *src_line =
-            (uint8_t *)red_get_image_line(chunks, &offset, &chunk, image_stride);
-
-        if (!src_line) {
-            return FALSE;
-        }
-
-        src_line += src->left * mjpeg_encoder_get_bytes_per_pixel(agent->mjpeg_encoder);
-        if (mjpeg_encoder_encode_scanline(agent->mjpeg_encoder, src_line, stream_width) == 0)
-            return FALSE;
-    }
-
-    return TRUE;
-}
-
 static inline int red_marshall_stream_data(RedChannelClient *rcc,
                   SpiceMarshaller *base_marshaller, Drawable *drawable)
 {
@@ -8588,7 +8524,7 @@ static inline int red_marshall_stream_data(RedChannelClient *rcc,
     uint64_t time_now = red_now();
     size_t outbuf_size;
 
-    if (!dcc->use_mjpeg_encoder_rate_control) {
+    if (!dcc->use_video_encoder_rate_control) {
         if (time_now - agent->last_send_time < (1000 * 1000 * 1000) / agent->fps) {
             agent->frames--;
 #ifdef STREAM_STATS
@@ -8602,33 +8538,28 @@ static inline int red_marshall_stream_data(RedChannelClient *rcc,
     frame_mm_time =  drawable->red_drawable->mm_time ?
                         drawable->red_drawable->mm_time :
                         reds_get_mm_time();
+
     outbuf_size = dcc->send_data.stream_outbuf_size;
-    ret = mjpeg_encoder_start_frame(agent->mjpeg_encoder, image->u.bitmap.format,
-                                    width, height,
-                                    &dcc->send_data.stream_outbuf,
-                                    &outbuf_size,
-                                    frame_mm_time);
+    ret = video_encoder_encode_frame(agent->video_encoder, &image->u.bitmap,
+            &drawable->red_drawable->u.copy.src_area,
+            width, height, stream->top_down, frame_mm_time,
+            &dcc->send_data.stream_outbuf, &outbuf_size, &n);
+
     switch (ret) {
-    case MJPEG_ENCODER_FRAME_DROP:
-        spice_assert(dcc->use_mjpeg_encoder_rate_control);
+    case VIDEO_ENCODER_FRAME_DROP:
+        spice_assert(dcc->use_video_encoder_rate_control);
 #ifdef STREAM_STATS
         agent->stats.num_drops_fps++;
 #endif
         return TRUE;
-    case MJPEG_ENCODER_FRAME_UNSUPPORTED:
+    case VIDEO_ENCODER_FRAME_UNSUPPORTED:
         return FALSE;
-    case MJPEG_ENCODER_FRAME_ENCODE_START:
+    case VIDEO_ENCODER_FRAME_ENCODE_DONE:
         break;
     default:
-        spice_error("bad return value (%d) from mjpeg_encoder_start_frame", ret);
-        return FALSE;
-    }
-
-    if (!encode_frame(dcc, &drawable->red_drawable->u.copy.src_area,
-                      &image->u.bitmap, stream)) {
+        spice_error("bad return value (%d) from video_encoder_start_frame", ret);
         return FALSE;
     }
-    n = mjpeg_encoder_end_frame(agent->mjpeg_encoder);
     dcc->send_data.stream_outbuf_size = outbuf_size;
 
     if (!drawable->sized_stream) {
@@ -10254,7 +10185,7 @@ static int display_channel_handle_stream_report(DisplayChannelClient *dcc,
         return FALSE;
     }
     stream_agent = &dcc->stream_agents[stream_report->stream_id];
-    if (!stream_agent->mjpeg_encoder) {
+    if (!stream_agent->video_encoder) {
         spice_info("stream_report: no encoder for stream id %u."
                     "Probably the stream has been destroyed", stream_report->stream_id);
         return TRUE;
@@ -10265,7 +10196,7 @@ static int display_channel_handle_stream_report(DisplayChannelClient *dcc,
                       stream_agent->report_id, stream_report->unique_id);
         return TRUE;
     }
-    mjpeg_encoder_client_stream_report(stream_agent->mjpeg_encoder,
+    video_encoder_client_stream_report(stream_agent->video_encoder,
                                        stream_report->num_frames,
                                        stream_report->num_drops,
                                        stream_report->start_frame_mm_time,
diff --git a/server/video_encoder.c b/server/video_encoder.c
new file mode 100644
index 0000000..4e4d3d8
--- /dev/null
+++ b/server/video_encoder.c
@@ -0,0 +1,128 @@
+/* -*- Mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+/*
+   Copyright (C) 2009 Red Hat, Inc.
+
+   This library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   This library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with this library; if not, see <http://www.gnu.org/licenses/>.
+*/
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "red_common.h"
+#include "video_encoder.h"
+#include "mjpeg_encoder.h"
+#if defined(USE_GSTREAMER_0_10)
+#include "gstreamer_encoder.h"
+#endif
+
+struct VideoEncoder {
+    VideoEncoderType type;
+    void *encoder_opaque;
+
+    VideoEncoderRateControlCbs cbs;
+    void *cbs_opaque;
+};
+
+VideoEncoder *video_encoder_new(VideoEncoderType type,
+                                int bit_rate_control, uint64_t starting_bit_rate,
+                                VideoEncoderRateControlCbs *cbs, void *opaque)
+{
+    VideoEncoder *enc;
+
+    enc = spice_new0(VideoEncoder, 1);
+
+    enc->type = type;
+
+    if (type == VIDEO_ENCODER_TYPE_JPEG) {
+        enc->encoder_opaque = (void *) mjpeg_encoder_new(bit_rate_control, starting_bit_rate, cbs, opaque);
+    }
+
+#if defined(USE_GSTREAMER_0_10)
+    if (type == VIDEO_ENCODER_TYPE_GSTREAMER) {
+        enc->encoder_opaque = (void *) gstreamer_encoder_new(starting_bit_rate, cbs, opaque);
+    }
+#endif
+    return enc;
+}
+
+void video_encoder_destroy(VideoEncoder *encoder)
+{
+    if (encoder->type == VIDEO_ENCODER_TYPE_JPEG)
+        mjpeg_encoder_destroy(encoder->encoder_opaque);
+#if defined(USE_GSTREAMER_0_10)
+    else if (encoder->type == VIDEO_ENCODER_TYPE_GSTREAMER)
+        gstreamer_encoder_destroy(encoder->encoder_opaque);
+#endif
+    free(encoder);
+}
+
+int video_encoder_encode_frame(VideoEncoder *encoder,
+                    const SpiceBitmap *bitmap,
+                    const SpiceRect *src,
+                    int width, int height, int top_down, uint32_t frame_mm_time,
+                    uint8_t **outbuf, size_t *outbuf_size, int *data_size)
+{
+    int ret;
+
+    if (encoder->type == VIDEO_ENCODER_TYPE_JPEG)
+        ret = mjpeg_encoder_encode_frame(encoder->encoder_opaque, bitmap, src,
+                                        width, height, top_down, frame_mm_time,
+                                        outbuf, outbuf_size, data_size);
+
+#if defined(USE_GSTREAMER_0_10)
+    else if (encoder->type == VIDEO_ENCODER_TYPE_GSTREAMER)
+        ret = gstreamer_encoder_encode_frame(encoder->encoder_opaque,
+                bitmap, src, width, height, top_down, frame_mm_time,
+                outbuf, outbuf_size, data_size);
+#endif
+
+    else
+        return VIDEO_ENCODER_FRAME_UNSUPPORTED;
+
+    return ret;
+}
+
+void video_encoder_client_stream_report(VideoEncoder *encoder,
+                                        uint32_t num_frames,
+                                        uint32_t num_drops,
+                                        uint32_t start_frame_mm_time,
+                                        uint32_t end_frame_mm_time,
+                                        int32_t end_frame_delay,
+                                        uint32_t audio_delay)
+{
+    if (encoder->type == VIDEO_ENCODER_TYPE_JPEG)
+        mjpeg_encoder_client_stream_report(encoder->encoder_opaque,
+                num_frames, num_drops, start_frame_mm_time, end_frame_mm_time,
+                end_frame_delay, audio_delay);
+}
+
+void video_encoder_notify_server_frame_drop(VideoEncoder *encoder)
+{
+    if (encoder->type == VIDEO_ENCODER_TYPE_JPEG)
+        mjpeg_encoder_notify_server_frame_drop(encoder->encoder_opaque);
+}
+
+uint64_t video_encoder_get_bit_rate(VideoEncoder *encoder)
+{
+    if (encoder->type == VIDEO_ENCODER_TYPE_JPEG)
+        return mjpeg_encoder_get_bit_rate(encoder->encoder_opaque);
+
+    return 0;
+}
+
+void video_encoder_get_stats(VideoEncoder *encoder, VideoEncoderStats *stats)
+{
+    if (encoder->type == VIDEO_ENCODER_TYPE_JPEG)
+        mjpeg_encoder_get_stats(encoder->encoder_opaque, stats);
+}
diff --git a/server/video_encoder.h b/server/video_encoder.h
new file mode 100644
index 0000000..c1ec488
--- /dev/null
+++ b/server/video_encoder.h
@@ -0,0 +1,116 @@
+/* -*- Mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+/*
+   Copyright (C) 2009 Red Hat, Inc.
+
+   This library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   This library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with this library; if not, see <http://www.gnu.org/licenses/>.
+*/
+
+#ifndef _H_video_encoder
+#define _H_video_encoder
+
+#include "red_common.h"
+
+enum {
+    VIDEO_ENCODER_FRAME_UNSUPPORTED = -1,
+    VIDEO_ENCODER_FRAME_DROP,
+    VIDEO_ENCODER_FRAME_ENCODE_DONE,
+};
+
+typedef enum {
+    VIDEO_ENCODER_TYPE_NONE = 0,
+    VIDEO_ENCODER_TYPE_JPEG,
+    VIDEO_ENCODER_TYPE_GSTREAMER,
+} VideoEncoderType;
+
+typedef struct VideoEncoder VideoEncoder;
+
+/*
+ * Callbacks required for controling and adjusting
+ * the stream bit rate:
+ * get_roundtrip_ms: roundtrip time in milliseconds
+ * get_source_fps: the input frame rate (#frames per second), i.e.,
+ * the rate of frames arriving from the guest to spice-server,
+ * before any drops.
+ */
+typedef struct VideoEncoderRateControlCbs {
+    uint32_t (*get_roundtrip_ms)(void *opaque);
+    uint32_t (*get_source_fps)(void *opaque);
+    void (*update_client_playback_delay)(void *opaque, uint32_t delay_ms);
+} VideoEncoderRateControlCbs;
+
+typedef struct VideoEncoderStats {
+    uint64_t starting_bit_rate;
+    uint64_t cur_bit_rate;
+    double avg_quality;
+} VideoEncoderStats;
+
+VideoEncoder *video_encoder_new(VideoEncoderType type,
+                                int bit_rate_control, uint64_t starting_bit_rate,
+                                VideoEncoderRateControlCbs *cbs, void *opaque);
+void video_encoder_destroy(VideoEncoder *encoder);
+
+/*
+ * *outbuf must be either NULL or allocated by malloc, since it might be freed
+ * during the encoding if its size is too small.
+ *
+ * return:
+ *  VIDEO_ENCODER_FRAME_UNSUPPORTED : frame cannot be encoded
+ *  VIDEO_ENCODER_FRAME_DROP        : frame should be dropped. This value can only be returned
+ *                                    if rate control is active.
+ *  VIDEO_ENCODER_FRAME_ENCODE_DONE : frame encoding completed.
+ */
+
+int video_encoder_encode_frame(VideoEncoder *encoder,
+                    const SpiceBitmap *bitmap,
+                    const SpiceRect *src,
+                    int width, int height, int top_down, uint32_t frame_mm_time,
+                    uint8_t **outbuf, size_t *outbuf_size, int *data_size);
+/*
+ * bit rate control
+ */
+
+/*
+ * Data that should be periodically obtained from the client. The report contains:
+ * num_frames         : the number of frames that reached the client during the time
+ *                      the report is referring to.
+ * num_drops          : the part of the above frames that was dropped by the client due to
+ *                      late arrival time.
+ * start_frame_mm_time: the mm_time of the first frame included in the report
+ * end_frame_mm_time  : the mm_time of the last_frame included in the report
+ * end_frame_delay    : (end_frame_mm_time - client_mm_time)
+ * audio delay        : the latency of the audio playback.
+ *                      If there is no audio playback, set it to MAX_UINT.
+ *
+ */
+void video_encoder_client_stream_report(VideoEncoder *encoder,
+                                        uint32_t num_frames,
+                                        uint32_t num_drops,
+                                        uint32_t start_frame_mm_time,
+                                        uint32_t end_frame_mm_time,
+                                        int32_t end_frame_delay,
+                                        uint32_t audio_delay);
+
+/*
+ * Notify the encoder each time a frame is dropped due to pipe
+ * congestion.
+ * We can deduce the client state by the frame dropping rate in the server.
+ * Monitoring the frame drops can help in fine tuning the playback parameters
+ * when the client reports are delayed.
+ */
+void video_encoder_notify_server_frame_drop(VideoEncoder *encoder);
+
+uint64_t video_encoder_get_bit_rate(VideoEncoder *encoder);
+void video_encoder_get_stats(VideoEncoder *encoder, VideoEncoderStats *stats);
+
+#endif
-- 
1.7.10.4
