gstreamer: How do I change the webcam input/output resolution?

I want to change the output/input resolution of my webcam using GStreamer,
for example from 800×600 to 640×480 pixels.
I can't find the C syntax for this. Can anyone help me?

This code displays the video stream from the webcam:

#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *sink, *convert;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source = gst_element_factory_make ("v4l2src", "source");
  sink = gst_element_factory_make ("autovideosink", "sink");
  convert = gst_element_factory_make ("ffmpegcolorspace", "convert");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !convert) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Set the source device */
  g_object_set (source, "device", "/dev/video0", NULL);

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, sink, convert, NULL);
  if (gst_element_link (convert, sink) != TRUE) {
    g_printerr ("Elements could not be linked: convert -> sink.\n");
    gst_object_unref (pipeline);
    return -1;
  }
  if (gst_element_link (source, convert) != TRUE) {
    g_printerr ("Elements could not be linked: source -> convert.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
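
This pipeline links the source straight to the converter, so the camera negotiates whatever resolution it prefers. To force a specific size, a capsfilter element can be inserted between the source and the converter. A minimal sketch, assuming GStreamer 0.10 to match the ffmpegcolorspace element used above (the element name "resolution" is illustrative); it replaces the direct source -> convert link:

  GstElement *capsfilter;
  GstCaps *caps;

  /* A capsfilter constrains the format negotiated between its neighbours */
  capsfilter = gst_element_factory_make ("capsfilter", "resolution");
  caps = gst_caps_new_simple ("video/x-raw-yuv",
      "width",  G_TYPE_INT, 640,
      "height", G_TYPE_INT, 480,
      NULL);
  g_object_set (capsfilter, "caps", caps, NULL);
  gst_caps_unref (caps);

  /* Link source -> capsfilter -> convert instead of source -> convert */
  gst_bin_add (GST_BIN (pipeline), capsfilter);
  if (gst_element_link_many (source, capsfilter, convert, NULL) != TRUE) {
    g_printerr ("Elements could not be linked: source -> capsfilter -> convert.\n");
    gst_object_unref (pipeline);
    return -1;
  }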


Solution

Thanks, man, you saved my life. Anyway, I've been desperately looking for code to capture video0 in C++. Did you solve your problem?
I hope this application can help:

static gboolean initialize_pipeline(AppData *appdata,
        int *argc, char ***argv)
{
    GstElement *pipeline, *camera_src, *screen_sink, *image_sink;
    GstElement *screen_queue, *image_queue;
    GstElement *csp_filter, *image_filter, *tee;
    GstCaps *caps;
    GstBus *bus;

    /* Initialize GStreamer */
    gst_init(argc, argv);

    /* Create the pipeline and attach a callback to its
     * message bus */
    pipeline = gst_pipeline_new("test-camera");

    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, appdata);
    gst_object_unref(GST_OBJECT(bus));

    /* Save the pipeline to the AppData structure */
    appdata->pipeline = pipeline;

    /* Create the elements */
    /* The camera video stream comes from a Video4Linux driver */
    camera_src = gst_element_factory_make(VIDEO_SRC, "camera_src");
    /* A colorspace filter is needed to make sure that the sinks
     * understand the stream coming from the camera */
    csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");
    /* Tee that copies the stream to multiple outputs */
    tee = gst_element_factory_make("tee", "tee");
    /* A queue creates a new thread for the stream */
    screen_queue = gst_element_factory_make("queue", "screen_queue");
    /* Sink that shows the image on screen. Xephyr doesn't support the
     * XVideo extension, so it needs to use ximagesink, but the device
     * uses xvimagesink */
    screen_sink = gst_element_factory_make(VIDEO_SINK, "screen_sink");
    /* Creates a separate thread for the stream from which the image
     * is captured */
    image_queue = gst_element_factory_make("queue", "image_queue");
    /* Filter to convert the stream to a format that the gdkpixbuf
     * library can use */
    image_filter = gst_element_factory_make("ffmpegcolorspace", "image_filter");
    /* A dummy sink for the image stream. Goes to bitheaven */
    image_sink = gst_element_factory_make("fakesink", "image_sink");

    /* Check that the elements were correctly initialized */
    if(!(pipeline && camera_src && screen_sink && csp_filter && screen_queue
            && image_queue && image_filter && image_sink))
    {
        g_critical("Couldn't create pipeline elements");
        return FALSE;
    }

    /* Set the image sink to emit the handoff signal before throwing
     * away its buffer */
    g_object_set(G_OBJECT(image_sink),
            "signal-handoffs", TRUE, NULL);

    /* Add the elements to the pipeline. This has to be done before
     * linking them */
    gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,
            tee, screen_queue, screen_sink, image_queue,
            image_filter, image_sink, NULL);

    /* Specify what kind of video is wanted from the camera */
    caps = gst_caps_new_simple("video/x-raw-rgb",
            "width", G_TYPE_INT, 640,
            "height", G_TYPE_INT, 480,
            NULL);

    /* Link the camera source and the colorspace filter using the
     * capabilities specified above */
    if(!gst_element_link_filtered(camera_src, csp_filter, caps))
    {
        return FALSE;
    }
    gst_caps_unref(caps);

    /* Connect Colorspace Filter -> Tee -> Screen Queue -> Screen Sink.
     * This finalizes the initialization of the screen part of the pipeline */
    if(!gst_element_link_many(csp_filter, tee, screen_queue, screen_sink, NULL))
    {
        return FALSE;
    }

    /* gdkpixbuf requires 8 bits per sample, which is 24 bits per
     * pixel */
    caps = gst_caps_new_simple("video/x-raw-rgb",
            "width", G_TYPE_INT, 640,
            "height", G_TYPE_INT, 480,
            "bpp", G_TYPE_INT, 24,
            "depth", G_TYPE_INT, 24,
            "framerate", GST_TYPE_FRACTION, 15, 1,
            NULL);

    /* Link the image branch of the pipeline. The pipeline is
     * ready after this */
    if(!gst_element_link_many(tee, image_queue, image_filter, NULL)) return FALSE;
    if(!gst_element_link_filtered(image_filter, image_sink, caps)) return FALSE;

    gst_caps_unref(caps);

    /* As soon as the screen is exposed, the window ID will be advised
     * to the sink */
    g_signal_connect(appdata->screen, "expose-event", G_CALLBACK(expose_cb),
            screen_sink);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    return TRUE;
}
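
The snippet references a bus_callback and an expose_cb that are not shown. For completeness, here is a minimal sketch of what the bus callback might look like; the loop field of AppData is an assumption, since the structure's definition is not part of the post:

/* Hypothetical sketch of the bus_callback referenced above. Assumes
 * AppData carries a GMainLoop *loop member, which the post does not show. */
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer data)
{
    AppData *appdata = (AppData *)data;

    switch(GST_MESSAGE_TYPE(message))
    {
        case GST_MESSAGE_ERROR:
        {
            GError *err = NULL;
            gchar *debug = NULL;

            gst_message_parse_error(message, &err, &debug);
            g_printerr("Error: %s\n", err->message);
            g_error_free(err);
            g_free(debug);
            g_main_loop_quit(appdata->loop);
            break;
        }
        case GST_MESSAGE_EOS:
            g_main_loop_quit(appdata->loop);
            break;
        default:
            break;
    }

    /* Returning TRUE keeps the watch attached to the bus */
    return TRUE;
}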

Thank you.


Other solutions

Yes, I found a solution. It is almost the same as yours.

I use a capsfilter element and caps:

GstCaps *Cap, *Cap2;
GstElement *filter;

Cap = gst_caps_from_string("video/x-raw-yuv, width=320, height=240, framerate=20/1");
Cap2 = gst_caps_from_string("video/x-raw-yuv, width=640, height=320, framerate=20/1");

filter = gst_element_factory_make("capsfilter", "filter");

Now I can build the pipeline, and before I set the pipeline to the PLAYING state I use the following call to set the resolution:

g_object_set(filter, "caps", Cap, NULL);
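
For reference, here is one way the filter might be wired into a complete pipeline; a sketch assuming the v4l2src/ffmpegcolorspace/autovideosink elements and GStreamer 0.10 caps strings from the earlier posts:

/* Sketch: wiring the capsfilter into a simple display pipeline.
 * The surrounding elements are assumptions based on the earlier posts. */
GstElement *pipeline, *source, *convert, *sink;

pipeline = gst_pipeline_new("resolution-pipeline");
source = gst_element_factory_make("v4l2src", "source");
convert = gst_element_factory_make("ffmpegcolorspace", "convert");
sink = gst_element_factory_make("autovideosink", "sink");

/* Apply the 320x240 caps before going to PLAYING */
g_object_set(filter, "caps", Cap, NULL);

gst_bin_add_many(GST_BIN(pipeline), source, filter, convert, sink, NULL);
if(!gst_element_link_many(source, filter, convert, sink, NULL))
    g_printerr("Elements could not be linked.\n");

gst_element_set_state(pipeline, GST_STATE_PLAYING);

To switch to the resolution in Cap2 later, the pipeline can be taken back to the READY or NULL state, the "caps" property set to Cap2, and the pipeline set to PLAYING again.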

Thanks for your answer, and I hope this helps other people too.

Stefan

