Nelu
Dec 1, 2010, 5:32:04 AM
to C++ RTMP Server
Hi,
here is the test application I've used.
You need to install GTK+ and GStreamer (0.10), then compile the
application using:
g++ -Wall `pkg-config --cflags --libs gstreamer-0.10 gstreamer-interfaces-0.10` `pkg-config --cflags --libs gtk+-2.0` capture_test.c -o capture_test
Run the application using:
./capture_test
Your webcam feed should appear in the main application window.
If you press the 'Start recording' button, each frame will go through
the 'buffer_probe_callback' function. There you can use an external
encoder (ffmpeg) to compress the video data.
Does anybody know how to do this using the ffmpeg API?
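To make the question concrete, here is the kind of thing I have in mind,
adapted from ffmpeg's api-example.c (libavcodec 52.x, the API current as
of now; untested, and the names enc_ctx, picture, outbuf, encoder_init
and encode_frame are mine, with CODEC_ID_MPEG4 only a placeholder codec):

#include <libavcodec/avcodec.h>

static AVCodecContext *enc_ctx = NULL;
static AVFrame *picture = NULL;
static uint8_t outbuf[1000000]; /* generous scratch buffer */

/* One-time encoder setup, sized to match the caps used in the pipeline */
static void encoder_init(void)
{
    AVCodec *codec;
    avcodec_register_all();
    codec = avcodec_find_encoder(CODEC_ID_MPEG4);
    enc_ctx = avcodec_alloc_context();
    enc_ctx->bit_rate = 400000;
    enc_ctx->width = 640;
    enc_ctx->height = 480;
    enc_ctx->time_base.num = 1;   /* 20 fps, as in the caps below */
    enc_ctx->time_base.den = 20;
    enc_ctx->gop_size = 10;
    enc_ctx->pix_fmt = PIX_FMT_YUV420P; /* I420 */
    avcodec_open(enc_ctx, codec);
    picture = avcodec_alloc_frame();
}

/* Call once per frame with the I420 data from buffer_probe_callback;
 * returns the number of compressed bytes written into outbuf */
static int encode_frame(unsigned char *data_frame)
{
    /* Point the AVFrame's plane pointers into the raw I420 buffer */
    avpicture_fill((AVPicture *)picture, data_frame,
                   PIX_FMT_YUV420P, enc_ctx->width, enc_ctx->height);
    return avcodec_encode_video(enc_ctx, outbuf, sizeof(outbuf), picture);
}

(The compile line above would then also need -lavcodec -lavutil.)
Is this the right direction, or is there a better way?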
Here is the code:
#include <stdlib.h>
#include <string.h>
#include <gtk/gtk.h>
#include <gdk/gdkx.h>
#include <gst/gst.h>
#include <gst/interfaces/xoverlay.h>
/* Define sources and sinks according to
* running environment
* NOTE: If you want to run the application
* in ARM scratchbox, you have to change these*/
#ifdef __arm__
/* The device by default supports only
* v4l2src for camera and xvimagesink
* for screen */
#define VIDEO_SRC "v4l2camsrc"
#define VIDEO_SINK "xvimagesink"
#else
/* These are for the X86 SDK. Xephyr doesn't
* support XVideo extension, so the application
* must use ximagesink. The video source depends
* on driver of your Video4Linux device so this
* may have to be changed */
#define VIDEO_SRC "videotestsrc"
#define VIDEO_SINK "ximagesink"
#endif
/* Define structure for variables that
* are needed throughout the application */
typedef struct
{
GtkWidget *window;
GstElement *pipeline;
GtkWidget *screen;
guint buffer_cb_id;
} AppData;
/* This callback will be registered to the image sink
* after user starts recording */
static gboolean buffer_probe_callback(
GstElement *image_sink,
GstBuffer *buffer, GstPad *pad, AppData *appdata)
{
g_print("-receive buffer=%x ",(unsigned int)buffer);
/* This is the YUV (I420) buffer that you can use for encoding... */
unsigned char *data_frame =
(unsigned char *) GST_BUFFER_DATA(buffer);
(void)data_frame; /* not used yet; silences -Wall until an encoder consumes it */
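/* My assumption: the 640x480 I420 buffer is tightly packed (no row
* padding), so the planes sit back to back:
*   Y plane: data_frame                 (640*480 bytes)
*   U plane: data_frame + 640*480       (640*480/4 bytes)
*   V plane: data_frame + 640*480*5/4   (640*480/4 bytes) */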
/* Returning TRUE means that the buffer is OK to be
* sent forward. When using fakesink this doesn't really
* matter because the data is discarded anyway */
return TRUE;
}
/* Callback that gets called when the user clicks
* the "Start recording" button */
static void on_start_recording(GtkWidget *widget, AppData *appdata)
{
GstElement *image_sink;
/* Get the image sink element from the pipeline */
image_sink = gst_bin_get_by_name(GST_BIN(appdata->pipeline),
"image_sink");
/* Display a note to the user */
g_print("\nStart recording...");
/* Connect the "handoff"-signal of the image sink to the
* callback. This gets called whenever the sink gets a
* buffer it's ready to pass forward on the pipeline */
appdata->buffer_cb_id = g_signal_connect(
G_OBJECT(image_sink), "handoff",
G_CALLBACK(buffer_probe_callback), appdata);
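/* Note: the handler id is saved in appdata->buffer_cb_id, so a
* "Stop recording" handler (not wired up here) could detach the
* probe again with:
*   g_signal_handler_disconnect(G_OBJECT(image_sink),
*                               appdata->buffer_cb_id); */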
}
/* Callback that gets called whenever pipeline's message bus has
* a message */
static void bus_callback(GstBus *bus, GstMessage *message, AppData *appdata)
{
gchar *message_str;
const gchar *message_name;
GError *error;
/*
message_name = gst_structure_get_name(gst_message_get_structure(message));
g_print("got message: %s\n", message_name);
*/
/* Report errors to the console */
if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR)
{
gst_message_parse_error(message, &error, &message_str);
g_error("GST error: %s\n", message_str); /* note: g_error() also aborts */
g_error_free(error);
g_free(message_str);
}
/* Report warnings to the console */
if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_WARNING)
{
gst_message_parse_warning(message, &error, &message_str);
g_warning("GST warning: %s\n", message_str);
g_error_free(error);
g_free(message_str);
}
/* See if the message type is GST_MESSAGE_APPLICATION, which means
* that the message was sent by the client code (this program) and
* not by GStreamer. */
if(GST_MESSAGE_TYPE(message) == GST_MESSAGE_APPLICATION)
{
/* Get name of the message's structure */
message_name =
gst_structure_get_name(gst_message_get_structure(message));
if(!message_name) return;
g_print("got message application: %s\n", message_name);
/* Any GUI notification must be shown here, because the bus callback
* is called in the main thread; calling GUI functions from GStreamer
* threads usually leads to problems with the X server */
/* A "photo-taken" message means that the photo was successfully taken
* and saved, and a message is shown to the user */
if(!strcmp(message_name, "photo-taken"))
{
g_print("\nPhoto taken");
}
/* "photo-failed" means that the photo couldn't be captured or saved
*/
if(!strcmp(message_name, "photo-failed"))
{
g_print("\nError: Saving photo failed");
}
}
}
/* Callback to be called when the screen-widget is exposed */
static gboolean expose_cb(GtkWidget * widget, GdkEventExpose * event,
gpointer data)
{
/* Tell the xvimagesink/ximagesink the x-window-id of the screen
* widget in which the video is shown. After this the video
* is shown in the correct widget */
g_print("expose-event- \n");
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(data),
GDK_WINDOW_XWINDOW(widget->window));
return FALSE;
}
/* Initialize the GStreamer pipeline. Below is a diagram
* of the pipeline that will be created:
*
*                          |Screen|  |Screen|
*                        ->|queue |->|sink  |-> Display
* |Camera|  |CSP   |  |Tee|/
* |src   |->|Filter|->|   |\  |Image|  |Image |  |Image|
*                        ->|queue|->|filter|->|sink |-> handoff callback
*/
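/* Roughly the same pipeline as a gst-launch line (untested, for
* orientation only):
*   gst-launch-0.10 v4l2src \
*     ! video/x-raw-yuv,format=(fourcc)I420,width=640,height=480,framerate=20/1 \
*     ! ffmpegcolorspace ! tee name=t \
*     t. ! queue ! ximagesink \
*     t. ! queue ! ffmpegcolorspace ! fakesink */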
static gboolean initialize_pipeline(AppData *appdata,
int *argc, char ***argv)
{
GstElement *pipeline, *camera_src, *screen_sink, *image_sink;
GstElement *screen_queue, *image_queue;
GstElement *csp_filter, *image_filter, *tee;
GstCaps *caps;
GstBus *bus;
/* Initialize Gstreamer */
gst_init(argc, argv);
/* Create the pipeline and attach a callback to its
* message bus */
pipeline = gst_pipeline_new("test-camera");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, (GstBusFunc)bus_callback, appdata);
gst_object_unref(GST_OBJECT(bus));
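/* The bus watch is dispatched from the GLib main loop that
* gtk_main() runs later on, so bus_callback fires in the main
* (GUI) thread */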
/* Save pipeline to the AppData structure */
appdata->pipeline = pipeline;
/* Create elements */
/* Camera video stream comes from a Video4Linux driver */
//camera_src = gst_element_factory_make(VIDEO_SRC, "camera_src");
camera_src = gst_element_factory_make("v4l2src", "camera_src");
/* A colorspace filter is needed to make sure that the sinks understand
* the stream coming from the camera */
csp_filter = gst_element_factory_make("ffmpegcolorspace",
"csp_filter");
/* Tee that copies the stream to multiple outputs */
tee = gst_element_factory_make("tee", "tee");
/* Queue creates new thread for the stream */
screen_queue = gst_element_factory_make("queue", "screen_queue");
/* Sink that shows the image on screen. Xephyr doesn't support XVideo
* extension, so it needs to use ximagesink, but the device uses
* xvimagesink */
screen_sink = gst_element_factory_make(VIDEO_SINK, "screen_sink");
/* Creates separate thread for the stream from which the image
* is captured */
image_queue = gst_element_factory_make("queue", "image_queue");
/* Filter to convert stream to use format that the gdkpixbuf library
* can use */
image_filter = gst_element_factory_make("ffmpegcolorspace",
"image_filter");
/* A dummy sink for the image stream. Goes to bitheaven */
image_sink = gst_element_factory_make("fakesink", "image_sink");
g_print("Check that elements are correctly initialized \n");
/* Check that elements are correctly initialized */
if(!(pipeline && camera_src && screen_sink && csp_filter && screen_queue
&& tee && image_queue && image_filter && image_sink))
{
g_critical("Couldn't create pipeline elements");
return FALSE;
}
g_print("Set image sink to emit handoff-signal before throwing away
it's buffer \n");
/* Set image sink to emit handoff-signal before throwing away
* it's buffer */
g_object_set(G_OBJECT(image_sink),
"signal-handoffs", TRUE, NULL);
g_print("Add elements to the pipeline. This has to be done prior to
linking them \n");
/* Add elements to the pipeline. This has to be done prior to
* linking them */
gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,
tee, screen_queue, screen_sink, image_queue,
image_filter, image_sink, NULL);
/* Specify what kind of video is wanted from the camera */
g_print(" Specify what kind of video is wanted from the camera \n");
/*caps = gst_caps_new_simple("video/x-raw-rgb", */
/*caps = gst_caps_new_simple("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"bpp", G_TYPE_INT, 24,
"framerate", GST_TYPE_FRACTION, 25, 1,
NULL);
caps = gst_caps_new_simple("video/x-raw-rgb",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"bpp", G_TYPE_INT, 24,
"framerate", GST_TYPE_FRACTION, 25, 1,
NULL); */
caps = gst_caps_new_simple("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 20, 1,
NULL);
g_print("Link the camera source and colorspace filter using
capabilities specified \n");
/* Link the camera source and colorspace filter using capabilities
* specified */
if(!gst_element_link_filtered(camera_src, csp_filter, caps))
{
return FALSE;
}
gst_caps_unref(caps);
g_print("Connect Colorspace Filter -> Tee -> Screen Queue -> Screen
Sink * This finalizes the initialization of the screen-part of the
pipeline \n");
/* Connect Colorspace Filter -> Tee -> Screen Queue -> Screen Sink
* This finalizes the initialization of the screen-part of the
pipeline */
if(!gst_element_link_many(csp_filter, tee, screen_queue, screen_sink,
NULL))
{
return FALSE;
}
g_print("gdkpixbuf requires 8 bits per sample which is 24 bits per
pixel \n");
/* gdkpixbuf requires 8 bits per sample which is 24 bits per
* pixel */
/*caps = gst_caps_new_simple("video/x-raw-rgb",
"width", G_TYPE_INT, 352,
"height", G_TYPE_INT, 288,
"bpp", G_TYPE_INT, 24,
NULL); */
caps = gst_caps_new_simple("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 20, 1,
NULL);
g_print("Link the image-branch of the pipeline. The pipeline is ready
after this \n");
/* Link the image-branch of the pipeline. The pipeline is
* ready after this */
if(!gst_element_link_many(tee, image_queue, image_filter, NULL))
return FALSE;
if(!gst_element_link_filtered(image_filter, image_sink, caps)) return FALSE;
gst_caps_unref(caps);
/* As soon as screen is exposed, window ID will be advised to the sink */
g_signal_connect(appdata->screen, "expose-event",
G_CALLBACK(expose_cb),
screen_sink);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
return TRUE;
}
/* Destroy the pipeline on exit */
static void destroy_pipeline(GtkWidget *widget, AppData *appdata)
{
/* Free the pipeline. This automatically also unrefs all elements
* added to the pipeline */
gst_element_set_state(appdata->pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(appdata->pipeline));
}
/* Initialize the GUI by creating a top-level
* GtkWindow */
void example_gui_initialize(
GtkWidget **window,
int *argc, char ***argv,
gchar *example_name)
{
g_thread_init(NULL);
/* Initialize GTK+ */
gtk_init(argc, argv);
/* Create the main window and set its title */
*window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
gtk_window_set_title (GTK_WINDOW (*window), "Video Capture");
gtk_widget_set_size_request (*window, 740, 576);
/* Connect destroying of the main window to gtk_main_quit */
g_signal_connect(G_OBJECT(*window), "delete_event",
G_CALLBACK(gtk_main_quit), NULL);
}
void example_gui_run(GtkWidget *window)
{
/* Show the window and widgets it contains
* and go to the main loop. */
gtk_widget_show_all(window);
gtk_main();
}
int main(int argc, char **argv)
{
AppData appdata;
GtkWidget *button, *hbox, *vbox_button, *vbox;
/* Initialize and create the GUI */
example_gui_initialize(
&appdata.window,
&argc, &argv, (gchar *)"Camera example");
vbox = gtk_vbox_new(FALSE, 0);
hbox = gtk_hbox_new(FALSE, 0);
vbox_button = gtk_vbox_new(FALSE, 0);
gtk_box_pack_start(GTK_BOX(hbox), vbox, FALSE, FALSE, 0);
gtk_box_pack_start(GTK_BOX(hbox), vbox_button, FALSE, FALSE, 0);
appdata.screen = gtk_drawing_area_new();
gtk_widget_set_size_request(appdata.screen, 500, 380);
gtk_box_pack_start(GTK_BOX(vbox), appdata.screen, FALSE, FALSE, 0);
button = gtk_button_new_with_label("Start recording");
gtk_widget_set_size_request(button, 170, 380);
gtk_box_pack_start(GTK_BOX(vbox_button), button, FALSE, FALSE, 0);
g_signal_connect(G_OBJECT(button), "clicked",
G_CALLBACK(on_start_recording), &appdata);
gtk_container_add(GTK_CONTAINER(appdata.window), hbox);
/* Initialize the GStreamer pipeline */
if(!initialize_pipeline(&appdata, &argc, &argv))
{
g_print("\n Failed to initialize pipeline");
return -1;
}
g_print("It seems is initialized OK !!!\n");
g_signal_connect(G_OBJECT(appdata.window), "destroy",
G_CALLBACK(destroy_pipeline), &appdata);
/* Begin the main application */
example_gui_run( appdata.window);
/* GStreamer resources are freed in destroy_pipeline() when the
* window is destroyed */
return 0;
}
Regards,
Nelu Cociag