"Fossies" - the Fresh Open Source Software Archive

    1 /*
    2   Copyright 2008 Google Inc.
    3 
    4   Licensed under the Apache License, Version 2.0 (the "License");
    5   you may not use this file except in compliance with the License.
    6   You may obtain a copy of the License at
    7 
    8        http://www.apache.org/licenses/LICENSE-2.0
    9 
   10   Unless required by applicable law or agreed to in writing, software
   11   distributed under the License is distributed on an "AS IS" BASIS,
   12   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   13   See the License for the specific language governing permissions and
   14   limitations under the License.
   15 */
   16 
   17 #include "gadget_videosink.h"
   18 #include <pthread.h>
   19 
   20 namespace ggadget {
   21 namespace gst {
   22 
   23 class GadgetVideoSink::ImageBuffer {
   24  public:
   25   enum BufferRecycleFlag {
   26     BUFFER_NOT_RECYCLED,
   27     BUFFER_TO_BE_RECYCLED,
   28     BUFFER_RECYCLED
   29   };
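        // How the flag is used elsewhere in this file:
        //   BUFFER_NOT_RECYCLED   - set by CreateInstance() and again when a
        //                           pooled buffer is handed out in BufferAlloc().
        //   BUFFER_TO_BE_RECYCLED - set by PutImage() before the buffer enters
        //                           the image queue, so Finalize() leaves it alone.
        //   BUFFER_RECYCLED       - set by Finalize() or PutImage() when the
        //                           buffer is returned to the sink's buffer pool.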
   30 
   31   static GType ImageBufferGetType()
   32   {
   33     static GType image_buffer_type;
   34 
   35     if (G_UNLIKELY(image_buffer_type == 0)) {
   36       static const GTypeInfo image_buffer_info = {
   37         sizeof(GstBufferClass),
   38         NULL,
   39         NULL,
   40         ImageBufferClassInit,
   41         NULL,
   42         NULL,
   43         sizeof(ImageBuffer),
   44         0,
   45         0,
   46         NULL
   47       };
   48       image_buffer_type = g_type_register_static(GST_TYPE_BUFFER,
   49                                                  "ImageBuffer",
   50                                                  &image_buffer_info,
   51                                                  static_cast<GTypeFlags>(0));
   52     }
   53 
   54     return image_buffer_type;
   55   }
   56 
   57 #define IS_IMAGE_BUFFER(obj) \
   58     (G_TYPE_CHECK_INSTANCE_TYPE((obj), ImageBuffer::ImageBufferGetType()))
   59 #define IMAGE_BUFFER(obj) \
   60     (G_TYPE_CHECK_INSTANCE_CAST((obj), ImageBuffer::ImageBufferGetType(), \
   61                                 ImageBuffer))
   62 
   63   static void ImageBufferClassInit(gpointer g_class, gpointer class_data)
   64   {
   65     GGL_UNUSED(class_data);
   66     GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS(g_class);
   67     mini_object_class->finalize =
   68         (GstMiniObjectFinalizeFunction)Finalize;
   69   }
   70 
   71   static void Finalize(ImageBuffer *image) {
   72     g_return_if_fail(image != NULL);
   73 
   74     if (!image->videosink_) {
   75       GST_WARNING_OBJECT(image->videosink_, "no sink found");
   76       return;
   77     }
   78 
   79     // For those that are already recycled or to be recycled, just return.
   80     if (image->recycle_flag_ != BUFFER_NOT_RECYCLED)
   81       return;
   82 
   83     if (image->width_ != GST_VIDEO_SINK_WIDTH(image->videosink_) ||
   84         image->height_ != GST_VIDEO_SINK_HEIGHT(image->videosink_)) {
   85       // The data buffer was allocated by us; free it ourselves.
   86       g_free(GST_BUFFER_DATA(image));
   87     } else {
   88       // Append to buffer pool.
   89       gst_buffer_ref(GST_BUFFER_CAST(image));
   90       image->recycle_flag_ = BUFFER_RECYCLED;
   91       image->videosink_->buffer_pool_ =
   92           g_slist_prepend(image->videosink_->buffer_pool_, image);
   93     }
   94   }
   95 
   96   static ImageBuffer *CreateInstance(GadgetVideoSink *videosink,
   97                                      GstCaps *caps) {
   98     ImageBuffer *image =
   99         IMAGE_BUFFER(gst_mini_object_new(ImageBufferGetType()));
  100     if (!image)
  101       return NULL;
  102 
  103     GstStructure *structure = gst_caps_get_structure(caps, 0);
  104     if (!gst_structure_get_int (structure, "width", &image->width_) ||
  105         !gst_structure_get_int (structure, "height", &image->height_)) {
  106       GST_WARNING("failed getting geometry from caps %" GST_PTR_FORMAT, caps);
            // Don't leak the freshly created buffer on this error path.
            gst_buffer_unref(GST_BUFFER_CAST(image));
  107       return NULL;
  108     }
  109 
  110     // We use 32-bpp.
  111     image->bytes_per_line_ = 4 * image->width_;
  112     image->size_ = image->bytes_per_line_ * image->height_;
  113 
  114     GST_BUFFER_DATA(image) = (guchar *)g_malloc(image->size_);
  115     if (!GST_BUFFER_DATA(image)) {
  116       gst_buffer_unref(GST_BUFFER_CAST(image));
  117       return NULL;
  118     }
  119     GST_BUFFER_SIZE(image) = static_cast<guint>(image->size_);
  120     image->recycle_flag_ = BUFFER_NOT_RECYCLED;
  121 
  122     // Keep a ref to our sink.
  123     image->videosink_ = videosink;
  124     gst_object_ref(videosink);
  125 
  126     return image;
  127   }
  128 
  129   static void FreeInstance(ImageBuffer *image) {
  130     if (image == NULL)
  131       return;
  132 
  133     // Make sure Finalize() won't recycle it (force a size mismatch).
  134     image->width_ = -1;
  135     image->height_ = -1;
  136 
  137     if (image->videosink_) {
  138       gst_object_unref(image->videosink_);
  139       image->videosink_ = NULL;
  140     }
  141 
  142     g_free(GST_BUFFER_DATA(image));
  143     gst_buffer_unref(GST_BUFFER_CAST(image));
  144   }
  145 
  146   void SetRecycleFlag(BufferRecycleFlag flag) {
  147     recycle_flag_ = flag;
  148   }
  149 
  150   BufferRecycleFlag GetRecycleFlag() {
  151     return recycle_flag_;
  152   }
  153 
  154   // Must be the first non-static data member so casts to GstBuffer work.
  155   GstBuffer buffer_;
  156   GadgetVideoSink *videosink_;
  157 
  158   // Image's real size, width, and height.
  159   size_t size_;
  160   int width_, height_;
  161 
  162   // We need the following information to show an image.
  163   int x_, y_;
  164   int w_, h_;
  165   int bytes_per_line_; // Stride
  166 
  167   // The state of the buffer.
  168   BufferRecycleFlag recycle_flag_;
  169 
  170  private:
  171   // Image buffer objects cannot be created or deleted with new/delete.
  172   // Use @CreateInstance and @FreeInstance instead.
  173   ImageBuffer();
  174   ~ImageBuffer();
  175 };
  176 
  177 // ImageQueue is a circular buffer which manages ImageBuffers provided
  178 // by the host (see the usage sketch after the class definition).
  179 class GadgetVideoSink::ImageQueue {
  180  public:
  181   static const int kMaxLength = 4;
  182 
  183   ImageQueue() : p_(0), c_(0) {
  184     pthread_mutex_init(&mutex_, NULL);
  185     for (int i = 0; i < kMaxLength; i++)
  186       images_[i] = NULL;
  187   }
  188 
  189   ~ImageQueue() {
  190     // The consumer may still be holding the lock, so acquire it first.
  191     pthread_mutex_lock(&mutex_);
  192     pthread_mutex_destroy(&mutex_);
  193     for (int i = 0; i < kMaxLength; i++) {
  194       if (images_[i])
  195         ImageBuffer::FreeInstance(images_[i]);
  196     }
  197   }
  198 
  199   // Only provided to the producer. It helps avoid passing in a duplicate
  200   // image buffer pointer. Since the consumer never modifies the queue
  201   // slots, it's OK for the sole producer to skip the lock here.
  202   bool DupImage(ImageBuffer *image) {
  203     for (int i = 0; i < kMaxLength; i++) {
  204       if (image && images_[i] == image)
  205         return true;
  206     }
  207     return false;
  208   }
  209 
  210   // Store @a image in the queue; return one that is no longer needed and
  211   // can be recycled or destroyed by the host.
  212   ImageBuffer *ProduceOneImage(ImageBuffer *image) {
  213     ASSERT(image);
  214 
  215     // If the mutex is being destroyed, lock may fail.
  216     if (pthread_mutex_lock(&mutex_) != 0)
  217       return NULL;
  218 
  219     // If the queue is full, don't store the new image; just return it.
  220     if ((p_ + 1) % kMaxLength == c_) {
  221       pthread_mutex_unlock(&mutex_);
  222       return image;
  223     }
  224 
  225     ImageBuffer *to_be_recycled = images_[p_];
  226     images_[p_] = image;
  227     p_ = (p_ + 1) % kMaxLength;
  228 
  229     pthread_mutex_unlock(&mutex_);
  230     return to_be_recycled;
  231   }
  232 
  233   ImageBuffer *ConsumeOneImage() {
  234     // If the mutex is being destroyed, lock may fail.
  235     if (pthread_mutex_lock(&mutex_) != 0)
  236       return NULL;
  237 
  238     // Check if the queue is empty.
  239     if (p_ == c_) {
  240       pthread_mutex_unlock(&mutex_);
  241       return NULL;
  242     }
  243 
  244     ImageBuffer *cur = images_[c_];
  245     c_ = (c_ + 1) % kMaxLength;
  246 
  247     pthread_mutex_unlock(&mutex_);
  248     return cur;
  249   }
  250 
  251  private:
  252   int p_;
  253   int c_;
  254   ImageBuffer *images_[kMaxLength];
  255   pthread_mutex_t mutex_;
  256 };
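      // Illustrative usage sketch (not part of the original code): the streaming
      // thread produces frames through PutImage(), which calls ProduceOneImage()
      // and recycles whatever buffer gets pushed out, while the host pulls frames
      // through ReceiveImageHandler(), which calls ConsumeOneImage():
      //
      //   // streaming thread (preroll/render path)
      //   ImageBuffer *old = image_queue_->ProduceOneImage(image);
      //   if (old) { /* recycle or free it; see PutImage() below */ }
      //
      //   // host side (via the "receive-image-handler" property)
      //   ImageBuffer *frame = image_queue_->ConsumeOneImage();
      //   if (frame) { /* copy out data/geometry; see ReceiveImageHandler() */ }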
  257 
  258 bool GadgetVideoSink::registered_ = false;
  259 GstVideoSinkClass *GadgetVideoSink::parent_class_ = NULL;
  260 GstStaticPadTemplate GadgetVideoSink::gadget_videosink_template_factory_ =
  261     GST_STATIC_PAD_TEMPLATE(const_cast<gchar*>("sink"),
  262                             GST_PAD_SINK,
  263                             GST_PAD_ALWAYS,
  264                             GST_STATIC_CAPS("video/x-raw-rgb, "
  265                                             "framerate = (fraction) [ 0, MAX ],"
  266                                             "width = (int) [ 1, MAX ], "
  267                                             "height = (int) [ 1, MAX ]"));
  268 const GstElementDetails GadgetVideoSink::gst_videosink_details_ =
  269     GST_ELEMENT_DETAILS(const_cast<gchar*>("Video sink"),
  270                         const_cast<gchar*>("Sink/Video"),
  271                         const_cast<gchar*>("A standard X based videosink"),
  272                         const_cast<gchar*>("Yuxiang Luo<luoyx@google.com>"));
  273 
  274 #define IS_GADGET_VIDEOSINK(obj) \
  275     (G_TYPE_CHECK_INSTANCE_TYPE((obj), GadgetVideoSinkGetType()))
  276 #define GADGET_VIDEOSINK(obj) \
  277     (G_TYPE_CHECK_INSTANCE_CAST((obj), GadgetVideoSinkGetType(), \
  278                                 GadgetVideoSink))
  279 
  280 bool GadgetVideoSink::Register() {
  281   if (registered_)
  282     return true;
  283 // gst_plugin_register_static() is available since GStreamer 0.10.16.
  284 #if GST_VERSION_MAJOR > 0 || GST_VERSION_MINOR > 10 || \
  285     (GST_VERSION_MINOR == 10 && GST_VERSION_MICRO >= 16)
  286   if (!gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
  287                                   "gadget_videosink_plugin",
  288                                   const_cast<gchar *>(""),
  289                                   GadgetVideoSink::InitPlugin,
  290                                   "1.0", "unknown", "", "", ""))
  291     return false;
  292 #else
  293   // Hand-expanded GST_PLUGIN_DEFINE_STATIC: the macro uses the gcc-specific
  294   // "__attribute__((constructor))", which is neither portable nor reliable.
  295   static GstPluginDesc plugin_desc = {
  296     GST_VERSION_MAJOR, GST_VERSION_MINOR, "gadget_videosink_plugin", "",
  297     GadgetVideoSink::InitPlugin, "1.0", "unknown", "", "", "",
  298     GST_PADDING_INIT
  299   };
  300   _gst_plugin_register_static(&plugin_desc);
  301 #endif
  302 
  303   // registered_ is set in InitPlugin().
  304   return registered_;
  305 }
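      // Illustrative sketch (not part of the original code): a host is expected
      // to call Register() once before building its pipeline; afterwards the
      // sink can be created by name like any other element, e.g.:
      //
      //   if (ggadget::gst::GadgetVideoSink::Register()) {
      //     GstElement *sink =
      //         gst_element_factory_make(kGadgetVideoSinkElementName, NULL);
      //     // ... hand 'sink' to a playbin/pipeline via the usual GStreamer API.
      //   }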
  306 
  307 gboolean GadgetVideoSink::InitPlugin(GstPlugin *plugin) {
  308   registered_ = gst_element_register(plugin, kGadgetVideoSinkElementName,
  309                                      GST_RANK_SECONDARY,
  310                                      GadgetVideoSinkGetType());
  311   return registered_;
  312 }
  313 
  314 GType GadgetVideoSink::GadgetVideoSinkGetType(void) {
  315   static GType videosink_type = 0;
  316 
  317   if (!videosink_type) {
  318     static const GTypeInfo videosink_info = {
  319       sizeof(GadgetVideoSinkClass),
  320       BaseInit,
  321       NULL,
  322       (GClassInitFunc)ClassInit,
  323       NULL,
  324       NULL,
  325       sizeof(GadgetVideoSink),
  326       0,
  327       (GInstanceInitFunc)Init,
  328       (GTypeValueTable*)NULL
  329     };
  330 
  331     videosink_type = g_type_register_static(GST_TYPE_VIDEO_SINK,
  332                                             "GadgetVideoSink",
  333                                             &videosink_info,
  334                                             static_cast<GTypeFlags>(0));
  335 
  336     g_type_class_ref(ImageBuffer::ImageBufferGetType());
  337   }
  338 
  339   return videosink_type;
  340 }
  341 
  342 void GadgetVideoSink::Init(GadgetVideoSink *videosink) {
  343   videosink->caps_ = NULL;
  344   videosink->bus_ = NULL;
  345   videosink->image_ = NULL;
  346   videosink->image_queue_ = NULL;
  347   videosink->buffer_pool_ = NULL;
  348   videosink->fps_n_ = 0;
  349   videosink->fps_d_ = 1;
  350   videosink->par_ = NULL;
  351   videosink->keep_aspect_ = FALSE;
  352 }
  353 
  354 void GadgetVideoSink::BaseInit(gpointer g_class) {
  355   GstElementClass *element_class = GST_ELEMENT_CLASS(g_class);
  356   gst_element_class_set_details(element_class, &gst_videosink_details_);
  357   gst_element_class_add_pad_template(
  358       element_class,
  359       gst_static_pad_template_get(&gadget_videosink_template_factory_));
  360 }
  361 
  362 void GadgetVideoSink::ClassInit(GadgetVideoSinkClass *klass) {
  363   GObjectClass *gobject_class;
  364   GstElementClass *gstelement_class;
  365   GstBaseSinkClass *gstbasesink_class;
  366 
  367   gobject_class = reinterpret_cast<GObjectClass*>(klass);
  368   gstelement_class = reinterpret_cast<GstElementClass*>(klass);
  369   gstbasesink_class = reinterpret_cast<GstBaseSinkClass*>(klass);
  370 
  371   parent_class_ =
  372       static_cast<GstVideoSinkClass*>(g_type_class_peek_parent(klass));
  373 
  374   gobject_class->finalize = Finalize;
  375   gobject_class->set_property = SetProperty;
  376   gobject_class->get_property = GetProperty;
  377 
  378   g_object_class_install_property(
  379       gobject_class, PROP_FORCE_ASPECT_RATIO,
  380       g_param_spec_boolean("force-aspect-ratio",
  381                            "Force aspect ratio",
  382                            "When enabled, reverse caps negotiation (scaling) "
  383                            "will respect the original aspect ratio",
  384                            FALSE,
  385                            static_cast<GParamFlags>(G_PARAM_READWRITE)));
  386   g_object_class_install_property(
  387       gobject_class, PROP_PIXEL_ASPECT_RATIO,
  388       g_param_spec_string("pixel-aspect-ratio",
  389                           "Pixel Aspect Ratio",
  390                           "The pixel aspect ratio of the device",
  391                           "1/1",
  392                           static_cast<GParamFlags>(G_PARAM_READWRITE)));
  393   g_object_class_install_property(
  394       gobject_class, PROP_GEOMETRY_WIDTH,
  395       g_param_spec_int("geometry-width",
  396                        "Geometry Width",
  397                        "Geometry Width",
  398                        0,
  399                        G_MAXINT,
  400                        0,
  401                        static_cast<GParamFlags>(G_PARAM_WRITABLE)));
  402   g_object_class_install_property(
  403       gobject_class, PROP_GEOMETRY_HEIGHT,
  404       g_param_spec_int("geometry-height",
  405                        "Geometry Height",
  406                        "Geometry height",
  407                        0,
  408                        G_MAXINT,
  409                        0,
  410                        static_cast<GParamFlags>(G_PARAM_WRITABLE)));
  411   g_object_class_install_property(
  412       gobject_class, PROP_RECEIVE_IMAGE_HANDLER,
  413       g_param_spec_pointer("receive-image-handler",
  414                            "Receive Image Handler",
  415                            "The handler is the only way to receive images "
  416                            "from the sink",
  417                            static_cast<GParamFlags>(G_PARAM_READABLE)));
  418 
  419   gstelement_class->change_state = ChangeState;
  420   gstelement_class->set_bus = SetBus;
  421   gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR(GetCaps);
  422   gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR(SetCaps);
  423   gstbasesink_class->buffer_alloc = GST_DEBUG_FUNCPTR(BufferAlloc);
  424   gstbasesink_class->get_times = GST_DEBUG_FUNCPTR(GetTimes);
  425   gstbasesink_class->event = GST_DEBUG_FUNCPTR(Event);
  426   gstbasesink_class->preroll = GST_DEBUG_FUNCPTR(ShowFrame);
  427   gstbasesink_class->render = GST_DEBUG_FUNCPTR(ShowFrame);
  428 }
  429 
  430 void GadgetVideoSink::Finalize(GObject * object) {
  431   g_return_if_fail(object != NULL);
  432 
  433   // We cannot delete the object directly, as gstreamer doesn't expect us
  434   // to free the object pointer.
  435 
  436   GadgetVideoSink *videosink = GADGET_VIDEOSINK(object);
  437   videosink->Reset();
  438 
  439   G_OBJECT_CLASS(parent_class_)->finalize(object);
  440 }
  441 
  442 GstCaps *GadgetVideoSink::GetCaps(GstBaseSink *bsink) {
  443   GadgetVideoSink *videosink = GADGET_VIDEOSINK(bsink);
  444 
  445   if (videosink->caps_) {
  446     return gst_caps_ref(videosink->caps_);
  447   }
  448 
  449   // get a template copy and add the pixel aspect ratio.
  450   size_t i;
  451   GstCaps *caps;
  452   caps = gst_caps_copy(
  453       gst_pad_get_pad_template_caps(GST_BASE_SINK(videosink)->sinkpad));
  454 
  455   for (i = 0; i < gst_caps_get_size(caps); ++i) {
  456     GstStructure *structure =
  457         gst_caps_get_structure(caps, static_cast<guint>(i));
  458     if (videosink->par_) {
  459       int nom, den;
  460       nom = gst_value_get_fraction_numerator(videosink->par_);
  461       den = gst_value_get_fraction_denominator(videosink->par_);
  462       gst_structure_set(structure, "pixel-aspect-ratio",
  463                         GST_TYPE_FRACTION, nom, den, NULL);
  464     } else {
  465       gst_structure_set(structure, "pixel-aspect-ratio",
  466                         GST_TYPE_FRACTION, 1, 1, NULL);
  467     }
  468   }
  469 
  470   return caps;
  471 }
  472 
  473 gboolean GadgetVideoSink::SetCaps(GstBaseSink *bsink, GstCaps *caps) {
  474   // We intersect caps with our template to make sure they are correct.
  475   GadgetVideoSink *videosink = GADGET_VIDEOSINK(bsink);
  476   GstCaps *intersection = gst_caps_intersect(videosink->caps_, caps);
  477   GST_DEBUG_OBJECT(videosink, "intersection returned %" GST_PTR_FORMAT,
  478                    intersection);
  479 
  480   if (gst_caps_is_empty(intersection)) {
  481     gst_caps_unref(intersection);
  482     return FALSE;
  483   }
  484 
  485   gst_caps_unref(intersection);
  486 
  487   gboolean ret = TRUE;
  488   GstStructure *structure;
  489   gint new_width, new_height;
  490   const GValue *fps;
  491   structure = gst_caps_get_structure(caps, 0);
  492   ret &= gst_structure_get_int(structure, "width", &new_width);
  493   ret &= gst_structure_get_int(structure, "height", &new_height);
  494   fps = gst_structure_get_value(structure, "framerate");
  495   ret &= (fps != NULL);
  496   if (!ret) {
  497     return FALSE;
  498   }
  499 
  500   // If the caps contain pixel-aspect-ratio, they have to match ours, otherwise
  501   // linking should fail.
  502   const GValue *par = gst_structure_get_value(structure,
  503                                               "pixel-aspect-ratio");
  504   if (par) {
  505     if (videosink->par_) {
  506       if (gst_value_compare(par, videosink->par_) != GST_VALUE_EQUAL) {
  507         goto wrong_aspect;
  508       }
  509     } else {
  510       // Try the default.
  511       int nom, den;
  512       nom = gst_value_get_fraction_numerator(par);
  513       den = gst_value_get_fraction_denominator(par);
  514       if (nom != 1 || den != 1) {
  515         goto wrong_aspect;
  516       }
  517     }
  518   }
  519 
  520   GST_VIDEO_SINK_WIDTH(videosink) = new_width;
  521   GST_VIDEO_SINK_HEIGHT(videosink) = new_height;
  522   videosink->fps_n_ = gst_value_get_fraction_numerator(fps);
  523   videosink->fps_d_ = gst_value_get_fraction_denominator(fps);
  524 
  525   if (GST_VIDEO_SINK_WIDTH(videosink) <= 0 ||
  526       GST_VIDEO_SINK_HEIGHT(videosink) <= 0) {
  527     return FALSE;
  528   }
  529 
  530   return TRUE;
  531 
  532   // ERRORS
  533 wrong_aspect:
  534   {
  535     GST_INFO_OBJECT(videosink, "pixel aspect ratio does not match");
  536     return FALSE;
  537   }
  538 }
  539 
  540 GstStateChangeReturn GadgetVideoSink::ChangeState(GstElement *element,
  541                                                   GstStateChange transition) {
  542   GadgetVideoSink *videosink = GADGET_VIDEOSINK(element);
  543   GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  544 
  545   switch (transition) {
  546     case GST_STATE_CHANGE_NULL_TO_READY:
  547       videosink->InitCaps();
  548       break;
  549     case GST_STATE_CHANGE_READY_TO_PAUSED:
  550       videosink->image_ = new Image;
  551       videosink->image_queue_ = new ImageQueue;
  552     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
  553       break;
  554     default:
  555       break;
  556   }
  557 
  558   ret = GST_ELEMENT_CLASS(parent_class_)->change_state(element, transition);
  559 
  560   switch (transition) {
  561     case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
  562       break;
  563     case GST_STATE_CHANGE_PAUSED_TO_READY:
  564       videosink->fps_n_ = 0;
  565       videosink->fps_d_ = 1;
  566       GST_VIDEO_SINK_WIDTH(videosink) = 0;
  567       GST_VIDEO_SINK_HEIGHT(videosink) = 0;
  568       delete videosink->image_;
  569       delete videosink->image_queue_;
  570       videosink->image_ = NULL;
  571       videosink->image_queue_ = NULL;
  572       break;
  573     case GST_STATE_CHANGE_READY_TO_NULL:
  574       videosink->Reset();
  575       break;
  576     default:
  577       break;
  578   }
  579 
  580   return ret;
  581 }
  582 
  583 void GadgetVideoSink::SetBus(GstElement *element, GstBus *bus) {
  584   GadgetVideoSink *videosink = GADGET_VIDEOSINK(element);
  585   videosink->bus_ = bus;
  586 }
  587 
  588 void GadgetVideoSink::GetTimes(GstBaseSink * bsink, GstBuffer * buf,
  589                                GstClockTime * start, GstClockTime * end) {
  590   GadgetVideoSink *videosink = GADGET_VIDEOSINK(bsink);
  591 
  592   if (GST_BUFFER_TIMESTAMP_IS_VALID(buf)) {
  593     *start = GST_BUFFER_TIMESTAMP(buf);
  594     if (GST_BUFFER_DURATION_IS_VALID(buf)) {
  595       *end = *start + GST_BUFFER_DURATION(buf);
  596     } else {
  597       if (videosink->fps_n_ > 0) {
  598         *end = *start + gst_util_uint64_scale_int(GST_SECOND,
  599                                                   videosink->fps_d_,
  600                                                   videosink->fps_n_);
  601       }
  602     }
  603   }
  604 }
  605 
  606 // Buffer management
  607 //
  608 // The buffer_alloc function must either return a buffer with the given size
  609 // and caps, or create a buffer with different caps attached to it. The
  610 // latter is called reverse negotiation, i.e., the sink suggests a different
  611 // format to the upstream peer.
  612 //
  613 // We try to do reverse negotiation when our geometry changes and we would
  614 // like a resized buffer.
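      //
      // A concrete illustration (numbers are made up): if upstream requests a
      // 640x480 buffer but the gadget's geometry is 320x240, we build
      // desired_caps for 320x240 (centered with the original aspect when
      // "force-aspect-ratio" is set) and offer them via
      // gst_pad_peer_accept_caps(). If the peer accepts, we allocate the
      // smaller buffer; otherwise we fall back to the negotiated width/height.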
  615 GstFlowReturn GadgetVideoSink::BufferAlloc(GstBaseSink * bsink,
  616                                            guint64 offset,
  617                                            guint size,
  618                                            GstCaps * caps,
  619                                            GstBuffer ** buf) {
  620   ImageBuffer *image = NULL;
  621   GstStructure *structure = NULL;
  622   GstFlowReturn ret = GST_FLOW_OK;
  623   gint width = 0, height = 0;
  624   GadgetVideoSink *videosink = GADGET_VIDEOSINK(bsink);
  625 
  626   GST_LOG_OBJECT(videosink,
  627                  "a buffer of %d bytes was requested with caps %"
  628                  GST_PTR_FORMAT " and offset %" G_GUINT64_FORMAT,
  629                  size, caps, offset);
  630 
  631   // Assume we're going to alloc what was requested; keep track of whether
  632   // we need to unref or not. When we suggest a new format upstream, we will
  633   // create new caps that we need to unref.
  634   GstCaps *alloc_caps = caps;
  635   bool alloc_unref = false;
  636 
  637   // get struct to see what is requested.
  638   structure = gst_caps_get_structure(caps, 0);
  639 
  640   if (gst_structure_get_int(structure, "width", &width) &&
  641       gst_structure_get_int(structure, "height", &height)) {
  642     GstVideoRectangle dst, src, result;
  643 
  644     src.w = width;
  645     src.h = height;
  646 
  647     // The destination is our own geometry.
  648     dst.w = videosink->geometry_width_;
  649     dst.h = videosink->geometry_height_;
  650 
  651     if (videosink->keep_aspect_) {
  652       GST_LOG_OBJECT(videosink,
  653                      "enforcing aspect ratio in reverse caps negotiation");
  654       src.x = src.y = dst.x = dst.y = 0;
  655       gst_video_sink_center_rect(src, dst, &result, TRUE);
  656     } else {
  657       GST_LOG_OBJECT(videosink, "trying to resize to window geometry "
  658                      "ignoring aspect ratio");
  659       result.x = result.y = 0;
  660       result.w = dst.w;
  661       result.h = dst.h;
  662     }
  663 
  664     // We would like another geometry.
  665     if (width != result.w || height != result.h) {
  666       int nom, den;
  667       GstCaps *desired_caps;
  668       GstStructure *desired_struct;
  669 
  670       // Make a copy of the incoming caps to create the new suggestion.
  671       // We can't use make_writable because we might then destroy the original
  672       // caps, which we still need if the peer does not accept the suggestion.
  673       desired_caps = gst_caps_copy(caps);
  674       desired_struct = gst_caps_get_structure(desired_caps, 0);
  675 
  676       gst_structure_set (desired_struct, "width", G_TYPE_INT, result.w, NULL);
  677       gst_structure_set (desired_struct, "height", G_TYPE_INT, result.h, NULL);
  678 
  679       // PAR property overrides the default one.
  680       if (videosink->par_) {
  681         nom = gst_value_get_fraction_numerator(videosink->par_);
  682         den = gst_value_get_fraction_denominator(videosink->par_);
  683         gst_structure_set(desired_struct, "pixel-aspect-ratio",
  684                            GST_TYPE_FRACTION, nom, den, NULL);
  685       } else {
  686         gst_structure_set(desired_struct, "pixel-aspect-ratio",
  687                           GST_TYPE_FRACTION, 1, 1, NULL);
  688       }
  689 
  690       // See whether the peer accepts our new suggestion; if there is no
  691       // peer, this function returns TRUE.
  692       if (gst_pad_peer_accept_caps(GST_VIDEO_SINK_PAD (videosink),
  693                                    desired_caps)) {
  694         gint bpp;
  695         bpp = size / height / width;
  696 
  697         // We will now alloc a buffer with the newly suggested caps. Make sure
  698         // we also unref these new caps after we set them on the buffer.
  699         alloc_caps = desired_caps;
  700         alloc_unref = true;
  701         width = result.w;
  702         height = result.h;
  703         size = bpp * width * height;
  704         GST_DEBUG("peer pad accepts our desired caps %" GST_PTR_FORMAT
  705             " buffer size is now %d bytes", desired_caps, size);
  706       } else {
  707         GST_DEBUG("peer pad does not accept our desired caps %" GST_PTR_FORMAT,
  708                   desired_caps);
  709         // We alloc a buffer with the original incoming caps.
  710         width = GST_VIDEO_SINK_WIDTH(videosink);
  711         height = GST_VIDEO_SINK_HEIGHT(videosink);
  712       }
  713     }
  714   }
  715 
  716   // Check whether we can reuse any buffer from our buffer pool.
  717   while (videosink->buffer_pool_) {
  718     image = static_cast<ImageBuffer*>(videosink->buffer_pool_->data);
  719     if (image) {
  720       // Removing from the pool.
  721       videosink->buffer_pool_ = g_slist_delete_link(videosink->buffer_pool_,
  722                                                     videosink->buffer_pool_);
  723       // If the image doesn't fit our needs, destroy it.
  724       if ((image->width_ != width) || (image->height_ != height)) {
  725         ImageBuffer::FreeInstance(image);
  726         image = NULL;
  727       } else {
  728         // We found a suitable image. Reset the recycle flag.
  729         ASSERT(image->GetRecycleFlag() == ImageBuffer::BUFFER_RECYCLED);
  730         image->SetRecycleFlag(ImageBuffer::BUFFER_NOT_RECYCLED);
  731         break;
  732       }
  733     } else {
  734       break;
  735     }
  736   }
  737 
  738   // We haven't found anything; create a new one.
  739   if (!image) {
  740     image = ImageBuffer::CreateInstance(videosink, alloc_caps);
  741   }
  742 
  743   // Now we should have an image, set appropriate caps on it.
  744   g_return_val_if_fail(image != NULL, GST_FLOW_ERROR);
  745   gst_buffer_set_caps(GST_BUFFER_CAST(image), alloc_caps);
  746 
  747   // Could be our new reffed suggestion or the original unreffed caps.
  748   if (alloc_unref)
  749     gst_caps_unref(alloc_caps);
  750 
  751   *buf = GST_BUFFER_CAST(image);
  752 
  753   return ret;
  754 }
  755 
  756 gboolean GadgetVideoSink::Event(GstBaseSink *sink, GstEvent *event) {
  757   // FIXME:
  758   // The default event handler would post an EOS message after it receives
  759   // the EOS event, but that does not seem to happen for our gadget video
  760   // sink, so we post the EOS message manually here.
  761   if (GST_EVENT_TYPE(event) == GST_EVENT_EOS) {
  762     GadgetVideoSink *videosink = GADGET_VIDEOSINK(sink);
  763     GstMessage *eos = gst_message_new_eos(reinterpret_cast<GstObject*>(sink));
  764     if (eos)
  765       gst_bus_post(videosink->bus_, eos);
  766   }
  767   return TRUE;
  768 }
  769 
  770 GstFlowReturn GadgetVideoSink::ShowFrame(GstBaseSink *bsink, GstBuffer *buf) {
  771   g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);
  772   GadgetVideoSink *videosink = GADGET_VIDEOSINK(bsink);
  773 
  774   if (IS_IMAGE_BUFFER(buf)) {
  775     GST_LOG_OBJECT(videosink, "buffer from our pool, writing directly");
  776     videosink->PutImage(IMAGE_BUFFER(buf));
  777   } else {
  778     // Otherwise we have to copy the data into our own image buffer.
  779     GST_LOG_OBJECT(videosink, "normal buffer, copying from it");
  780     GST_DEBUG_OBJECT(videosink, "creating our image");
  781     ImageBuffer *image_buf =
  782       ImageBuffer::CreateInstance(videosink, GST_BUFFER_CAPS(buf));
  783     if (!image_buf)
  784       goto no_image;
  785 
  786     if (image_buf->size_ < GST_BUFFER_SIZE(buf)) {
  787       ImageBuffer::FreeInstance(image_buf);
  788       goto no_image;
  789     }
  790     memcpy(GST_BUFFER_DATA(image_buf), GST_BUFFER_DATA(buf),
  791            MIN(GST_BUFFER_SIZE(buf), image_buf->size_));
  792 
  793     videosink->PutImage(image_buf);
  794 
  795     ImageBuffer::Finalize(image_buf);
  796     gst_buffer_unref(GST_BUFFER_CAST(image_buf));
  797   }
  798 
  799   return GST_FLOW_OK;
  800 
  801 no_image:
  802   // No image available.
  803   GST_DEBUG("could not create image");
  804   return GST_FLOW_ERROR;
  805 }
  806 
  807 void GadgetVideoSink::SetProperty(GObject *object,
  808                                   guint prop_id,
  809                                   const GValue *value,
  810                                   GParamSpec *pspec) {
  811   g_return_if_fail(IS_GADGET_VIDEOSINK(object));
  812   GadgetVideoSink *videosink = GADGET_VIDEOSINK(object);
  813 
  814   switch (prop_id) {
  815     case PROP_FORCE_ASPECT_RATIO:
  816       videosink->keep_aspect_ = g_value_get_boolean(value);
  817       break;
  818     case PROP_PIXEL_ASPECT_RATIO:
  819     {
  820       GValue *tmp;
  821 
  822       tmp = g_new0(GValue, 1);
  823       g_value_init(tmp, GST_TYPE_FRACTION);
  824 
  825       if (!g_value_transform(value, tmp)) {
  826         GST_WARNING_OBJECT(videosink,
  827                            "Could not transform string to aspect ratio");
  828         g_free(tmp);
  829       } else {
  830         GST_DEBUG_OBJECT(videosink, "set PAR to %d/%d",
  831                          gst_value_get_fraction_numerator(tmp),
  832                          gst_value_get_fraction_denominator(tmp));
  833         g_free(videosink->par_);
  834         videosink->par_ = tmp;
  835       }
  836     }
  837       break;
  838     case PROP_GEOMETRY_WIDTH:
  839       videosink->geometry_width_ = g_value_get_int(value);
  840       break;
  841     case PROP_GEOMETRY_HEIGHT:
  842       videosink->geometry_height_ = g_value_get_int(value);
  843       break;
  844     default:
  845       G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
  846       break;
  847   }
  848 }
  849 
  850 void GadgetVideoSink::GetProperty(GObject * object,
  851                                   guint prop_id,
  852                                   GValue * value,
  853                                   GParamSpec * pspec) {
  854   g_return_if_fail(IS_GADGET_VIDEOSINK (object));
  855   GadgetVideoSink *videosink = GADGET_VIDEOSINK(object);
  856 
  857   switch (prop_id) {
  858     case PROP_FORCE_ASPECT_RATIO:
  859       g_value_set_boolean(value, videosink->keep_aspect_);
  860       break;
  861     case PROP_PIXEL_ASPECT_RATIO:
  862       if (videosink->par_)
  863         g_value_transform(videosink->par_, value);
  864       break;
  865     case PROP_RECEIVE_IMAGE_HANDLER:
  866       g_value_set_pointer(value, (void*)ReceiveImageHandler);
  867       break;
  868     default:
  869       G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
  870       break;
  871   }
  872 }
  873 
  874 // This function initializes caps for the only supported format.
  875 void GadgetVideoSink::InitCaps() {
  876   ASSERT(!caps_);
  877   caps_ = gst_caps_new_simple("video/x-raw-rgb",
  878                               "bpp", G_TYPE_INT, 32,
  879                               "depth", G_TYPE_INT, 24,
  880                               "endianness", G_TYPE_INT, G_BIG_ENDIAN,
  881                               "red_mask", G_TYPE_INT, 0xff00,
  882                               "green_mask", G_TYPE_INT, 0xff0000,
  883                               "blue_mask", G_TYPE_INT, 0xff000000,
  884                               "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
  885                               "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
  886                               "framerate", GST_TYPE_FRACTION_RANGE,
                                    0, 1, G_MAXINT, 1, NULL);
  887 
  888   // Update par with the default one if not set yet.
  889   if (!par_) {
  890     par_ = g_new0(GValue, 1);
  891     g_value_init(par_, GST_TYPE_FRACTION);
  892     gst_value_set_fraction(par_, 1, 1);  // 1:1
  893   }
  894 
  895   int nom, den;
  896   nom = gst_value_get_fraction_numerator(par_);
  897   den = gst_value_get_fraction_denominator(par_);
  898   gst_caps_set_simple(caps_, const_cast<gchar*>("pixel-aspect-ratio"),
  899                       GST_TYPE_FRACTION, nom, den, NULL);
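
        // For reference, the caps built above are equivalent to the string:
        //   video/x-raw-rgb, bpp=32, depth=24, endianness=4321,
        //   red_mask=0xff00, green_mask=0xff0000, blue_mask=0xff000000,
        //   width=[1,MAX], height=[1,MAX], framerate=[0/1,MAX/1],
        //   pixel-aspect-ratio as computed above (1/1 unless the
        //   "pixel-aspect-ratio" property was set beforehand).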
  900 }
  901 
  902 // This function converts the image format if necessary, puts the image into
  903 // the image queue, sends a message to notify that a new frame is coming,
  904 // and recycles any reusable image buffer.
  905 gboolean GadgetVideoSink::PutImage(ImageBuffer *image) {
  906   if (!image)
  907     return TRUE;
  908 
  909   // The upstream may pass in the same image buffer twice. For example, before
  910   // upstream finalizes an image buffer, a seeking operation occurs, and under
  911   // some conditions the upstream may reuse the image buffer instead of
  912   // finalizing and freeing it. Since we may store image buffers in the
  913   // image queue or buffer pool while putting images, the image passed in
  914   // this time may already have been stored there during the last call of
  915   // this function. So first check whether the buffer is a duplicate, and
  916   // simply discard it and return if it is.
  917   if (g_slist_find(buffer_pool_, image) || image_queue_->DupImage(image)) {
  918     return TRUE;
  919   }
  920 
  921   GstVideoRectangle dst, src, result;
  922   src.w = image->width_;
  923   src.h = image->height_;
  924   dst.w = geometry_width_;
  925   dst.h = geometry_height_;
  926   src.x = src.y = dst.x = dst.y = 0;
  927   gst_video_sink_center_rect(src, dst, &result, FALSE);
  928   image->x_ = result.x;
  929   image->y_ = result.y;
  930   image->w_ = result.w;
  931   image->h_ = result.h;
  932 
  933   // Increase refcount and set TO_BE_RECYCLED flag so that image buffer won't
  934   // be finalized/freed by upstream.
  935   gst_buffer_ref(GST_BUFFER_CAST(image));
  936   image->SetRecycleFlag(ImageBuffer::BUFFER_TO_BE_RECYCLED);
  937 
  938   // Put it into the image queue.
  939   ImageBuffer *to_be_recycled = image_queue_->ProduceOneImage(image);
  940 
  941   // Send a message to notify that a new frame is coming.
  942   if (bus_) {
  943     GstStructure *structure =
  944         gst_structure_new("New Image", kGadgetVideoSinkMessageName,
  945                           G_TYPE_INT, NEW_IMAGE, NULL);
  946     GstMessage *message =
  947         gst_message_new_element(reinterpret_cast<GstObject*>(this), structure);
  948     if (message)
  949       gst_bus_post(bus_, message);
  950   }
  951 
  952   if (to_be_recycled) {
  953     if (to_be_recycled->width_ != GST_VIDEO_SINK_WIDTH(this) ||
  954         to_be_recycled->height_ != GST_VIDEO_SINK_HEIGHT(this)) {
  955       ImageBuffer::FreeInstance(to_be_recycled);
  956     } else {
  957       to_be_recycled->SetRecycleFlag(ImageBuffer::BUFFER_RECYCLED);
  958       buffer_pool_ = g_slist_prepend(buffer_pool_, to_be_recycled);
  959     }
  960   }
  961 
  962   return TRUE;
  963 }
  964 
  965 void GadgetVideoSink::BufferPoolClear() {
  966   while (buffer_pool_) {
  967     ImageBuffer *image = static_cast<ImageBuffer*>(buffer_pool_->data);
  968     buffer_pool_ = g_slist_delete_link(buffer_pool_, buffer_pool_);
  969     ImageBuffer::FreeInstance(image);
  970   }
  971 }
  972 
  973 void GadgetVideoSink::Reset() {
  974   if (caps_) {
  975     gst_caps_unref(caps_);
  976     caps_ = NULL;
  977   }
  978   if (image_) {
  979     delete image_;
  980     image_ = NULL;
  981   }
  982   if (image_queue_) {
  983     delete image_queue_;
  984     image_queue_ = NULL;
  985   }
  986 
  987   BufferPoolClear();
  988 
  989   if (par_) {
  990     g_free(par_);
  991     par_ = NULL;
  992   }
  993 }
  994 
  995 GadgetVideoSink::Image *
  996 GadgetVideoSink::ReceiveImageHandler(GstElement *element) {
  997   ASSERT(element);
  998   GadgetVideoSink *videosink = GADGET_VIDEOSINK(element);
  999   if (videosink->image_queue_) {
 1000     ImageBuffer *image_internal = videosink->image_queue_->ConsumeOneImage();
 1001     if (image_internal != NULL) {
 1002       ASSERT(videosink->image_);
 1003       videosink->image_->data =
 1004           reinterpret_cast<char*>(GST_BUFFER_DATA(image_internal));
 1005       videosink->image_->x = image_internal->x_;
 1006       videosink->image_->y = image_internal->y_;
 1007       videosink->image_->w = image_internal->w_;
 1008       videosink->image_->h = image_internal->h_;
 1009       videosink->image_->stride = image_internal->bytes_per_line_;
 1010       return videosink->image_;
 1011     }
 1012   }
 1013   return NULL;
 1014 }
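      // Illustrative host-side sketch (not part of the original code): a host
      // typically reads the read-only "receive-image-handler" property once,
      // watches the bus for the element message posted in PutImage() (its
      // structure carries kGadgetVideoSinkMessageName == NEW_IMAGE), and then
      // pulls frames:
      //
      //   gpointer handler = NULL;
      //   g_object_get(G_OBJECT(sink), "receive-image-handler", &handler, NULL);
      //   typedef GadgetVideoSink::Image *(*ReceiveImageFunc)(GstElement *);
      //   ReceiveImageFunc receive_image =
      //       reinterpret_cast<ReceiveImageFunc>(handler);
      //
      //   GadgetVideoSink::Image *frame = receive_image(GST_ELEMENT(sink));
      //   if (frame) {
      //     // frame->data holds 32-bpp pixels, frame->stride bytes per row;
      //     // frame->x/y/w/h say where to draw inside the gadget's geometry.
      //   }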
 1015 
 1016 } // namespace gst
 1017 } // namespace ggadget