Discussion:
[gst-devel] play audio and video simultaneously
(too old to reply)
alavariega
2009-04-22 20:37:01 UTC
Permalink
Hi everybody!

I'm currently writing a GStreamer application to play back video and audio
simultaneously in gstreamer0.10, but I have some problems reaching my goal:
the on_decpad_added function is never executed. Do you have any idea what
could be the problem?

#include <gst/gst.h>

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );

GstElement *pipeline, *src, *demux, *decoderv,*sinkv,*decodera, *convert,
*resample, *sinka;
GstElement *queueA,* *queueV;

int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&argc,&argv);
loop = g_main_loop_new(NULL, FALSE);

if(argc != 2)
{
g_print("Usage: %s ", argv[0]);
return -1;
}

pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);

src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");

/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}

/*Gstreamer audio elements*/

decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}

queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");

g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
queueV,sinkv,decodera, queueA,convert, resample, sinka, NULL);

gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
gst_element_link (queueA, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);

g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
g_signal_connect (decodera,
"pad-added",G_CALLBACK(on_decpad_added),decodera);
//g_signal_connect (decoderv,
"pad-added",G_CALLBACK(on_decpad_added),decoderv);

/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);

if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;

g_print ("Failed to start up pipeline!\n");

/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}

g_main_loop_run (loop);

/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);

return 0;

}


static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data)
{
GMainLoop *loop = data;

switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;

gst_message_parse_error (msg, &err, &debug);

g_print ("Error: %s\n", err->message);
g_error_free (err);

if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}

g_main_loop_quit (loop);
break;
}
default:
break;
}

return TRUE;
}


static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( element, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was
linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}

static void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;

caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

const gchar *c = gst_structure_get_name(str);
g_print("on_pad_added: %s\n",c);
if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
g_debug ("Linking video pad to dec_vd");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
g_assert (targetsink != NULL);
g_print("\nVIDEO--pad source: %s\n", gst_pad_get_name(pad));
g_print("\nVIDEO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The video pad src was linked\n");
gst_object_unref (targetsink);
}

if (g_strrstr (c, "audio")) {
g_debug ("Linking audio pad to dec_ad");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decodera, "sink");
g_assert (targetsink != NULL);
g_print("\nAUDIO--pad source: %s\n", gst_pad_get_name(pad));
g_print("\nAUDIO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The audio pad src was linked\n");
gst_object_unref (targetsink);
}

gst_caps_unref (caps);
}

When i execute the application, this is the results:


** (testvideop:6122): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6122): DEBUG: Linking audio pad to dec_ad

AUDIO--pad source: audio_00

AUDIO--pad sink: sink
The audio pad src was linked
** (testvideop:6122): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6122): DEBUG: Linking video pad to dec_vd

VIDEO--pad source: video_00

VIDEO--pad sink: sink
The video pad src was linked
Error: Internal data flow error.
Debug details: gstbasesrc.c(2193): gst_base_src_loop (): /VIDEO
PLAYER/filesource:
streaming task paused, reason not-linked (-1)


thanks in advance for your help,

Alberto
--
View this message in context: http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23175849.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
sledge hammer
2009-04-23 13:07:08 UTC
Permalink
The "mad" element doesn't have dynamic pads, so it doesn't emit a "pad-added" signal. You can connect it directly to "audioconvert".

Also I see that you declare "queueV" as a "GstElement**" and not as a "GstElement*"

Date: Wed, 22 Apr 2009 13:37:01 -0700
From: ***@dextratech.com
To: gstreamer-***@lists.sourceforge.net
Subject: [gst-devel] play audio and video simultaneously


Hi everybody!

I'm currently writing a GStreamer application to play back video and audio simultaneously in gstreamer0.10, but I have some problems reaching my goal: the on_decpad_added function is never executed. Do you have any idea what could be the problem?

#include <gst/gst.h>

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );

GstElement *pipeline, *src, *demux, *decoderv,*sinkv,*decodera, *convert, *resample, *sinka;
GstElement *queueA,* *queueV;

int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&argc,&argv);
loop = g_main_loop_new(NULL, FALSE);

if(argc != 2)
{
g_print("Usage: %s ", argv[0]);
return -1;
}

pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);

src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");

/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}

/*Gstreamer audio elements*/

decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}

queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");

g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv, queueV,sinkv,decodera, queueA,convert, resample, sinka, NULL);

gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
gst_element_link (queueA, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);

g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
g_signal_connect (decodera, "pad-added",G_CALLBACK(on_decpad_added),decodera);
//g_signal_connect (decoderv, "pad-added",G_CALLBACK(on_decpad_added),decoderv);

/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);

if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;

g_print ("Failed to start up pipeline!\n");

/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}

g_main_loop_run (loop);

/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);

return 0;

}


static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = data;

switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;

gst_message_parse_error (msg, &err, &debug);

g_print ("Error: %s\n", err->message);
g_error_free (err);

if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}

g_main_loop_quit (loop);
break;
}
default:
break;
}

return TRUE;
}


static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( element, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}

static void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;

caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

const gchar *c = gst_structure_get_name(str);
g_print("on_pad_added: %s\n",c);
if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
g_debug ("Linking video pad to dec_vd");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
g_assert (targetsink != NULL);
g_print("\nVIDEO--pad source: %s\n", gst_pad_get_name(pad));
g_print("\nVIDEO--pad sink: %s\n", gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The video pad src was linked\n");
gst_object_unref (targetsink);
}

if (g_strrstr (c, "audio")) {
g_debug ("Linking audio pad to dec_ad");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decodera, "sink");
g_assert (targetsink != NULL);
g_print("\nAUDIO--pad source: %s\n", gst_pad_get_name(pad));
g_print("\nAUDIO--pad sink: %s\n", gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The audio pad src was linked\n");
gst_object_unref (targetsink);
}

gst_caps_unref (caps);
}

When i execute the application, this is the results:


** (testvideop:6122): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6122): DEBUG: Linking audio pad to dec_ad

AUDIO--pad source: audio_00

AUDIO--pad sink: sink
The audio pad src was linked
** (testvideop:6122): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6122): DEBUG: Linking video pad to dec_vd

VIDEO--pad source: video_00

VIDEO--pad sink: sink
The video pad src was linked
Error: Internal data flow error.
Debug details: gstbasesrc.c(2193): gst_base_src_loop (): /VIDEO PLAYER/filesource:
streaming task paused, reason not-linked (-1)


thanks in advance for your help,

Alberto


View this message in context: play audio and video simultaneously

Sent from the GStreamer-devel mailing list archive at Nabble.com.

_________________________________________________________________
Το What's New σας ειδοποιεί άμεσα για κάθε ενημέρωση. Μάθετε πώς.
http://home.live.com/
alavariega
2009-04-23 15:31:41 UTC
Permalink
The "mad" element doesn't have dynamic pads, so it doesn't emit a "pad-added"
signal. You can connect it directly to "audioconvert".

Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".


Hi Sledge, thanks for your reply!

I have already made the corresponding changes, but the signal that links the
video decoder with its queue is still not activated. When I execute
the application, this is the result:

** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked

There are no more errors, but the video and audio streams are not playing
simultaneously.

Thanks in advance for the help,

Best Regards,

Alberto.



#include<gst/gst.h>

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );

GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera, *convert,
*resample, *sinka;
GstElement *queueA, *queueV;

int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&argc,&argv);
loop = g_main_loop_new(NULL, FALSE);

if(argc != 2)
{
g_print("Usage: %s <mpg/mpeg video file>", argv[0]);
return -1;
}

pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);

src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");

/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}

/*Gstreamer audio elements*/

decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}

queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");

g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
queueV,sinkv,decodera,convert, resample, sinka, NULL);

gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
// gst_element_link (queueA, convert);
gst_element_link (decodera, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);

g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
//g_signal_connect (decodera,
"pad-added",G_CALLBACK(on_decpad_added),decodera);
g_signal_connect (decoderv,
"pad-added",G_CALLBACK(on_decpad_added),decoderv);

/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);

if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;

g_print ("Failed to start up pipeline!\n");

/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}

g_main_loop_run (loop);

/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);

return 0;

}


static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data)
{
GMainLoop *loop = data;

switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;

gst_message_parse_error (msg, &err, &debug);

g_print ("Error: %s\n", err->message);
g_error_free (err);

if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}

g_main_loop_quit (loop);
break;
}
default:
break;
}

return TRUE;
}


static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
//element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( queueV, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was
linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}

static void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;

caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

const gchar *c = gst_structure_get_name(str);
g_print("on_pad_added: %s\n",c);
if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
g_debug ("Linking video pad to decoderv");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
g_assert (targetsink != NULL);
//g_print("\nVIDEO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nVIDEO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The video pad src was linked\n");
gst_object_unref (targetsink);
}

if (g_strrstr (c, "audio")) {
g_debug ("Linking audio pad to decodera");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decodera, "sink");
g_assert (targetsink != NULL);
//g_print("\nAUDIO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nAUDIO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The audio pad src was linked\n");
gst_object_unref (targetsink);
}

gst_caps_unref (caps);
}
--
View this message in context: http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23197413.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
sledge hammer
2009-04-24 10:37:44 UTC
Permalink
I think your problem is this:

The queues should be between the demuxer and the decoder. I see that you put them between the decoder and the audioconvert(for the audio stream). The pipeline linkage should look like src->demuxer->queuea->decodera->convert->resample->sinka
src->demuxer->queuev->decoderv->sinkv
Date: Thu, 23 Apr 2009 08:31:41 -0700
Subject: Re: [gst-devel] play audio and video simultaneously
The "mad" element doesn't have dynamic pads, so it doesn't emit a "pad-added"
signal. You can connect it directly to "audioconvert".
Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".
Hi Sledge, thanks for your reply!
I have already made the corresponding changes and still not activated the
signal that link the video decoder with the respective queue. when i execute
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked
There are no more errors, but they are not playing the video and audio
streams simultaneously
Thanks in advance for the help,
Best Regards,
Alberto.
#include<gst/gst.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );
GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera, *convert,
*resample, *sinka;
GstElement *queueA, *queueV;
int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&argc,&argv);
loop = g_main_loop_new(NULL, FALSE);
if(argc != 2)
{
g_print("Usage: %s <mpg/mpeg video file>", argv[0]);
return -1;
}
pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);
src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");
/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}
/*Gstreamer audio elements*/
decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}
queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");
g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
queueV,sinkv,decodera,convert, resample, sinka, NULL);
gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
// gst_element_link (queueA, convert);
gst_element_link (decodera, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);
g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
//g_signal_connect (decodera,
"pad-added",G_CALLBACK(on_decpad_added),decodera);
g_signal_connect (decoderv,
"pad-added",G_CALLBACK(on_decpad_added),decoderv);
/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;
g_print ("Failed to start up pipeline!\n");
/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}
g_main_loop_run (loop);
/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data)
{
GMainLoop *loop = data;
switch (GST_MESSAGE_TYPE (msg)) {
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}
g_main_loop_quit (loop);
break;
}
break;
}
return TRUE;
}
static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
//element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( queueV, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was
linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}
static void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
const gchar *c = gst_structure_get_name(str);
g_print("on_pad_added: %s\n",c);
if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
g_debug ("Linking video pad to decoderv");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
g_assert (targetsink != NULL);
//g_print("\nVIDEO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nVIDEO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The video pad src was linked\n");
gst_object_unref (targetsink);
}
if (g_strrstr (c, "audio")) {
g_debug ("Linking audio pad to decodera");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decodera, "sink");
g_assert (targetsink != NULL);
//g_print("\nAUDIO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nAUDIO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The audio pad src was linked\n");
gst_object_unref (targetsink);
}
gst_caps_unref (caps);
}
--
View this message in context: http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23197413.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
------------------------------------------------------------------------------
Stay on top of everything new and different, both inside and
around Java (TM) technology - register by April 22, and save
$200 on the JavaOne (SM) conference, June 2-5, 2009, San Francisco.
300 plus technical and hands-on sessions. Register today.
Use priority code J9JMT32. http://p.sf.net/sfu/p
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
_________________________________________________________________
Το What's New σας ειδοποιεί άμεσα για κάθε ενημέρωση. Μάθετε πώς.
http://home.live.com/
Guilherme
2009-04-26 12:03:59 UTC
Permalink
Mates, I'm using gst-launch to do the following procedure:

1 - Read from a mic
2 - Save in a Buffer
3 - Playback recorded sound

gst-launch -v alsasrc ! queue ! alsasink

It works pretty well with audiotestsrc ! alsasink

Could I get some help from you?

Tks!

-------------------

Guilherme Longo
Dept. Eng. da Computação
Unaerp

Linux User - #484927

*Before Asking
http://www.istf.com.br/?page=perguntas

!- I'd rather die on my feet than live on my knees -!
Post by sledge hammer
The queues should be between the demuxer and the decoder. I see that
you put them between the decoder and the audioconvert(for the audio
stream). The pipeline linkage should look like
src->demuxer->queuea->decodera->convert->resample->sinka
src->demuxer->queuev->decoderv->sinkv
Date: Thu, 23 Apr 2009 08:31:41 -0700
Subject: Re: [gst-devel] play audio and video simultaneously
The "mad" element doesn't have dynamic pads, so it doesn't emit a
"pad-added"
signal. You can connect it directly to "audioconvert".
Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".
Hi Sledge, thanks for your reply!
I have already made the corresponding changes and still not
activated the
signal that link the video decoder with the respective queue. when i
execute
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked
There are no more errors, but they are not playing the video and audio
streams simultaneously
Thanks in advance for the help,
Best Regards,
Alberto.
#include<gst/gst.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );
GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera,
*convert,
*resample, *sinka;
GstElement *queueA, *queueV;
int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&argc,&argv);
loop = g_main_loop_new(NULL, FALSE);
if(argc != 2)
{
g_print("Usage: %s <mpg/mpeg video file>", argv[0]);
return -1;
}
pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);
src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");
/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}
/*Gstreamer audio elements*/
decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}
queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");
g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
queueV,sinkv,decodera,convert, resample, sinka, NULL);
gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
// gst_element_link (queueA, convert);
gst_element_link (decodera, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);
g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
//g_signal_connect (decodera,
"pad-added",G_CALLBACK(on_decpad_added),decodera);
g_signal_connect (decoderv,
"pad-added",G_CALLBACK(on_decpad_added),decoderv);
/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;
g_print ("Failed to start up pipeline!\n");
/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}
g_main_loop_run (loop);
/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
/* Bus watch callback: reacts to messages posted on the pipeline's bus.
 * 'data' is the GMainLoop created in main(); both end-of-stream and
 * error messages quit it so the program can tear down the pipeline.
 * Returning TRUE keeps the watch installed. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = data;

  switch (GST_MESSAGE_TYPE (msg)) {
    /* BUG FIX: the original had no "case GST_MESSAGE_EOS:" label here,
     * so the End-of-stream statements sat before the first case label
     * and were unreachable dead code. */
    case GST_MESSAGE_EOS:
      g_print ("End-of-stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug = NULL;
      GError *err = NULL;

      gst_message_parse_error (msg, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      if (debug) {
        g_print ("Debug details: %s\n", debug);
        g_free (debug);
      }
      g_main_loop_quit (loop);
      break;
    }
    default:
      /* Ignore all other message types (state changes, tags, ...). */
      break;
  }

  return TRUE;
}
/* "pad-added" handler for the video decoder: links the decoder's newly
 * created source pad to the sink pad of the video queue (queueV), which
 * feeds the video sink.
 * NOTE: only decoderv is connected to this callback in main(), so the
 * target queue is hard-coded to queueV; the commented-out line shows how
 * it would be chosen if the audio decoder also used this handler. */
static void on_decpad_added (GstElement *element, GstPad *pad)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *targetsink;

  g_debug ("Signal: decoder pad-added");

  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);

  g_debug ("Linking decoder to QUEUE pad ");
  /* Link it actually */
  /* element = (element == decodera ? queueA : queueV); */
  targetsink = gst_element_get_pad (queueV, "sink");
  g_assert (targetsink != NULL);

  /* BUG FIX: check the result of gst_pad_link() directly; the original
   * printed "was linked" in the failure branch as well, which hid link
   * failures, and the failure message was split across two source lines
   * (mail-wrap damage) so it did not compile. */
  if (gst_pad_link (pad, targetsink) == GST_PAD_LINK_OK)
    g_print ("The decoder pad src was linked\n");
  else
    g_print ("The stream:%s pad src was NOT linked\n",
        gst_pad_get_name (pad));

  gst_object_unref (targetsink);
  gst_caps_unref (caps);
}
/* "pad-added" handler for the demuxer: inspects the caps of each newly
 * exposed pad and links video/image pads to the video decoder and audio
 * pads to the audio decoder. Called once per elementary stream as
 * dvddemux discovers them. */
static void on_pad_added (GstElement *element, GstPad *pad)
{
  GstCaps *caps;
  GstStructure *str;
  const gchar *c;

  g_debug ("Signal: pad-added");

  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);

  c = gst_structure_get_name (str);
  g_print ("on_pad_added: %s\n", c);

  /* BUG FIX: the original left the continuation lines of commented-out
   * g_print debug calls uncommented (mail-wrap damage), leaving bare
   * "gst_pad_get_name(pad));" fragments that do not compile; those
   * debug remnants are removed here. Link results are now checked. */
  if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
    g_debug ("Linking video pad to decoderv");
    /* Link it actually */
    GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
    g_assert (targetsink != NULL);
    if (gst_pad_link (pad, targetsink) == GST_PAD_LINK_OK)
      g_print ("The video pad src was linked\n");
    gst_object_unref (targetsink);
  }

  if (g_strrstr (c, "audio")) {
    g_debug ("Linking audio pad to decodera");
    /* Link it actually */
    GstPad *targetsink = gst_element_get_pad (decodera, "sink");
    g_assert (targetsink != NULL);
    if (gst_pad_link (pad, targetsink) == GST_PAD_LINK_OK)
      g_print ("The audio pad src was linked\n");
    gst_object_unref (targetsink);
  }

  gst_caps_unref (caps);
}
--
http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23197413.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
------------------------------------------------------------------------------
Stay on top of everything new and different, both inside and
around Java (TM) technology - register by April 22, and save
$200 on the JavaOne (SM) conference, June 2-5, 2009, San Francisco.
300 plus technical and hands-on sessions. Register today.
Use priority code J9JMT32. http://p.sf.net/sfu/p
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
------------------------------------------------------------------------
Σύρετε φωτογραφίες στο παράθυρο του Messenger. Δείτε πώς. Κάντε κλικ
εδώ! <http://download.live.com/messenger%20>
------------------------------------------------------------------------
------------------------------------------------------------------------------
Crystal Reports &#45; New Free Runtime and 30 Day Trial
Check out the new simplified licensing option that enables unlimited
royalty&#45;free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
------------------------------------------------------------------------
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
Guilherme
2009-04-26 13:06:27 UTC
Permalink
Also trying:


gst-launch alsasrc ! audioconvert ! level ! audioconvert ! flacenc !
filesink location=my_file.mp3

does create the file but no snd is recorded.

;o(

Tks!

-------------------

Guilherme Longo
Dept. Eng. da Computac,a~o
Unaerp

Linux User - #484927

*Before Asking
http://www.istf.com.br/?page=perguntas

!- I'd rather die on my feet than live on my knees -!
Post by Guilherme
1 - Read from a mic
2 - Save in a Buffer
3 - Playback recorded sound
gst-launch -v alsasrc ! queue ! alsasink
It works pretty well wih audiotestsrc ! alsasink
Could I get some help from you?
Tks!
-------------------
Guilherme Longo
Dept. Eng. da Computac,a~o
Unaerp
Linux User - #484927
*Before Asking
http://www.istf.com.br/?page=perguntas
!- I'd rather die on my feet than live on my knees -!
Post by sledge hammer
The queues should be between the demuxer and the decoder. I see that
you put them between the decoder and the audioconvert(for the audio
stream). The pipeline linkage should look like
src->demuxer->queuea->decodera->convert->resample->sinka
src->demuxer->queuev->decoderv->sinkv
Date: Thu, 23 Apr 2009 08:31:41 -0700
Subject: Re: [gst-devel] play audio and video simultaneously
The "mad" element doesn't have dynamic pads, so it doesn't emit a
"pad-added"
signal. You can connect it directly to "audioconvert".
Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".
Hi Sledge, thanks for your reply!
I have already made the corresponding changes and still not
activated the
signal that link the video decoder with the respective queue. when
i execute
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked
There are no more errors, but they are not playing the video and audio
streams simultaneously
Thanks in advance for the help,
Best Regards,
Alberto.
#include<gst/gst.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );
GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera,
*convert,
*resample, *sinka;
GstElement *queueA, *queueV;
/* Builds and runs an MPEG program-stream playback pipeline:
 *
 *   filesrc -> dvddemux -+-> mpeg2dec -> queueV -> xvimagesink
 *                        +-> mad -> audioconvert -> audioresample -> osssink
 *
 * The demuxer's and video decoder's source pads appear dynamically, so
 * they are linked in the on_pad_added() / on_decpad_added() callbacks. */
int main(int argc, char *argv[])
{
  GstStateChangeReturn ret;
  GMainLoop *loop;
  GstBus *bus;

  /* initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  if (argc != 2)
  {
    g_print ("Usage: %s <mpg/mpeg video file>", argv[0]);
    return -1;
  }

  pipeline = gst_pipeline_new ("VIDEO PLAYER");
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);

  src = gst_element_factory_make ("filesrc", "filesource");
  demux = gst_element_factory_make ("dvddemux", "mpg-demux");

  /* Gstreamer video elements */
  decoderv = gst_element_factory_make ("mpeg2dec", "mpeg-decoder");
  sinkv = gst_element_factory_make ("xvimagesink", "video-out");
  if (!decoderv || !sinkv)
  {
    g_print ("\nthe video could not playback\n");
    return -1;
  }

  /* Gstreamer audio elements */
  decodera = gst_element_factory_make ("mad", "decoder-audio");
  convert = gst_element_factory_make ("audioconvert", "a-convert");
  resample = gst_element_factory_make ("audioresample", "a-resample");
  sinka = gst_element_factory_make ("osssink", "play audio");
  if (!decodera || !convert || !resample || !sinka)
  {
    g_print ("\nthe audio could not playback\n");
    return -1;
  }

  /* queueA is currently unused (the audio decoder is linked straight to
   * audioconvert below); queueV buffers decoded video before the sink. */
  queueA = gst_element_factory_make ("queue", "queue-audio");
  queueV = gst_element_factory_make ("queue", "queue-video");

  g_object_set (G_OBJECT (src), "location", argv[1], NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, demux, decoderv,
      queueV, sinkv, decodera, convert, resample, sinka, NULL);

  /* BUG FIX: link results were previously ignored; a failed link made
   * the pipeline silently dead. */
  if (!gst_element_link (src, demux) ||
      !gst_element_link (queueV, sinkv) ||
      !gst_element_link (decodera, convert) ||
      !gst_element_link (convert, resample) ||
      !gst_element_link (resample, sinka))
  {
    g_print ("Failed to link pipeline elements!\n");
    return -1;
  }

  g_signal_connect (demux, "pad-added", G_CALLBACK (on_pad_added), NULL);
  /* BUG FIX: the continuation line of this commented-out connect had
   * lost its comment marker (mail-wrap damage) and broke compilation.
   * "mad" has static pads and emits no "pad-added", so it stays linked
   * directly above:
   * g_signal_connect (decodera, "pad-added",
   *     G_CALLBACK (on_decpad_added), decodera);
   */
  g_signal_connect (decoderv, "pad-added",
      G_CALLBACK (on_decpad_added), decoderv);

  /* run */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE)
  {
    GstMessage *msg;

    g_print ("Failed to start up pipeline!\n");
    /* check if there is an error message with details on the bus */
    msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
    if (msg) {
      GError *err = NULL;

      g_print ("\nDISPLAY ERROR:\n");
      gst_message_parse_error (msg, &err, NULL);
      g_print ("ERROR: %s\n", err->message);
      g_error_free (err);
      gst_message_unref (msg);
    }
    gst_object_unref (bus);
    return -1;
  }

  g_main_loop_run (loop);

  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (bus);   /* BUG FIX: the bus reference was leaked */
  gst_object_unref (pipeline);
  return 0;
}
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data)
{
GMainLoop *loop = data;
switch (GST_MESSAGE_TYPE (msg)) {
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}
g_main_loop_quit (loop);
break;
}
break;
}
return TRUE;
}
static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
//element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( queueV, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was
linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}
static void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
const gchar *c = gst_structure_get_name(str);
g_print("on_pad_added: %s\n",c);
if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
g_debug ("Linking video pad to decoderv");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
g_assert (targetsink != NULL);
//g_print("\nVIDEO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nVIDEO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The video pad src was linked\n");
gst_object_unref (targetsink);
}
if (g_strrstr (c, "audio")) {
g_debug ("Linking audio pad to decodera");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decodera, "sink");
g_assert (targetsink != NULL);
//g_print("\nAUDIO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nAUDIO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The audio pad src was linked\n");
gst_object_unref (targetsink);
}
gst_caps_unref (caps);
}
--
http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23197413.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
------------------------------------------------------------------------------
Stay on top of everything new and different, both inside and
around Java (TM) technology - register by April 22, and save
$200 on the JavaOne (SM) conference, June 2-5, 2009, San Francisco.
300 plus technical and hands-on sessions. Register today.
Use priority code J9JMT32. http://p.sf.net/sfu/p
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
------------------------------------------------------------------------
Σύρετε φωτογραφίες στο παράθυρο του Messenger. Δείτε πώς. Κάντε κλικ
εδώ! <http://download.live.com/messenger%20>
------------------------------------------------------------------------
------------------------------------------------------------------------------
Crystal Reports &#45; New Free Runtime and 30 Day Trial
Check out the new simplified licensign option that enables unlimited
royalty&#45;free distribution of the report engine for externally
facing server and web deployment.
http://p.sf.net/sfu/businessobjects
------------------------------------------------------------------------
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
Stefan Kost
2009-04-26 14:25:22 UTC
Permalink
Post by Guilherme
gst-launch alsasrc ! audioconvert ! level ! audioconvert ! flacenc !
filesink location=my_file.mp3
does create the file but no snd is recorded.
;o(
Tks!
-------------------
Guilherme Longo
Dept. Eng. da Computac,a~o
Unaerp
Please check with the GNOME mixer that you have selected the right source for
recording. Also use gst-launch -m to see the level-meter messages from the level
element. Also, flacenc ! filesink location=my_file.mp3 is wrong. Use lame if you
want to encode into an mp3, and use .flac as the extension if you really want flac.

Finally, the level element has a demo app under
gst-plugins-good/tests/examples/level/
which you might want to checkout.

Stefan
Guilherme
2009-04-26 15:15:53 UTC
Permalink
Stefan...

I did figure this out early this morning, but the problem persists.

1 - I am using the right source "alsasrc"!

That's what is happening:

gst-launch -m alsasrc ! audioconvert ! level ! audioconvert ! wavenc !
filesink location=/home/guilherme/Desktop/guilherme.wav


[..]
Setting pipeline to PLAYING ...
Got Message from element "pipeline0" (new-clock): GstMessageNewClock,
clock=(GstClock)"\(GstAudioClock\)\ GstAudioSrcClock";
New clock: GstAudioSrcClock
Got Message from element "wavenc0" (state-changed): GstMessageState,
old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
Got Message from element "audioconvert1" (state-changed):
GstMessageState, old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
Got Message from element "level0" (state-changed): GstMessageState,
old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
Got Message from element "audioconvert0" (state-changed):
GstMessageState, old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
Got Message from element "alsasrc0" (state-changed): GstMessageState,
old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
^CCaught interrupt -- handling interrupt.
Got Message from element "pipeline0" (application): GstLaunchInterrupt,
message=(string)"Pipeline\ interrupted";
Interrupt: Stopping pipeline ...
Execution ended after 17225878195 ns.
Setting pipeline to PAUSED ...
Setting pipeline to READY ...
^C
***@lap:~$

And then, when I open guilherme.wav , gxine says that the file is empty

Tks!

-------------------

Guilherme Longo
Dept. Eng. da Computação
Unaerp

Linux User - #484927

*Before Asking
http://www.istf.com.br/?page=perguntas

!- I'd rather die on my feet than live on my knees -!
Post by Stefan Kost
Post by Guilherme
gst-launch alsasrc ! audioconvert ! level ! audioconvert ! flacenc !
filesink location=my_file.mp3
does create the file but no snd is recorded.
;o(
Tks!
-------------------
Guilherme Longo
Dept. Eng. da Computac,a~o
Unaerp
Please check with gnome mixer if you have selected the right source for
reording. Also you gst-launch -m to see the level-meter messages from level
element. ALso flacenc ! filesink location=my_file.mp3 is wrong. Use lame if you
want to encode into an mp3, use .flac as an extension, if you really want flac.
Finaly the level element has a demo app under
gst-plugins-good/tests/examples/level/
which you might want to checkout.
Stefan
------------------------------------------------------------------------------
Crystal Reports &#45; New Free Runtime and 30 Day Trial
Check out the new simplified licensign option that enables unlimited
royalty&#45;free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
Guilherme
2009-04-26 16:24:12 UTC
Permalink
Mates, just for reference

running

gst-launch audiotestsrc ! audioconvert ! wavenc ! filesink
location=/home/guilherme/Desktop/test.wav

creates a test.wav file of 18 minutes, no matter when I stop recording
and also it generates a horrible cracking sound...

;o/

Looking for help with waveenc I cant find much information as you can see:

http://gstreamer.freedesktop.org/data/doc/gstreamer/0.10.0/gst-plugins-good-plugins/html/gst-plugins-good-plugins-plugin-wavenc.html

;0/

Tks!

-------------------

Guilherme Longo
Dept. Eng. da Computação
Unaerp

Linux User - #484927

*Before Asking
http://www.istf.com.br/?page=perguntas

!- I'd rather die on my feet than live on my knees -!
Post by Guilherme
Stefan...
I did figure out this early this morning but the problem persist.
1 - Iam using the right source "alsasrc"!
gst-launch -m alsasrc ! audioconvert ! level ! audioconvert ! wavenc !
filesink location=/home/guilherme/Desktop/guilherme.wav
[..]
Setting pipeline to PLAYING ...
Got Message from element "pipeline0" (new-clock): GstMessageNewClock,
clock=(GstClock)"\(GstAudioClock\)\ GstAudioSrcClock";
New clock: GstAudioSrcClock
Got Message from element "wavenc0" (state-changed): GstMessageState,
old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
GstMessageState, old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
Got Message from element "level0" (state-changed): GstMessageState,
old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
GstMessageState, old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
Got Message from element "alsasrc0" (state-changed): GstMessageState,
old-state=(GstState)GST_STATE_PAUSED,
new-state=(GstState)GST_STATE_PLAYING,
pending-state=(GstState)GST_STATE_VOID_PENDING;
^CCaught interrupt -- handling interrupt.
GstLaunchInterrupt, message=(string)"Pipeline\ interrupted";
Interrupt: Stopping pipeline ...
Execution ended after 17225878195 ns.
Setting pipeline to PAUSED ...
Setting pipeline to READY ...
^C
And then, when I open guilherme.wav , gxine says that the file is empty
Tks!
-------------------
Guilherme Longo
Dept. Eng. da Computação
Unaerp
Linux User - #484927
*Before Asking
http://www.istf.com.br/?page=perguntas
!- I'd rather die on my feet than live on my knees -!
Post by Stefan Kost
Post by Guilherme
gst-launch alsasrc ! audioconvert ! level ! audioconvert ! flacenc !
filesink location=my_file.mp3
does create the file but no snd is recorded.
;o(
Tks!
-------------------
Guilherme Longo
Dept. Eng. da Computac,a~o
Unaerp
Please check with gnome mixer if you have selected the right source for
reording. Also you gst-launch -m to see the level-meter messages from level
element. ALso flacenc ! filesink location=my_file.mp3 is wrong. Use lame if you
want to encode into an mp3, use .flac as an extension, if you really want flac.
Finaly the level element has a demo app under
gst-plugins-good/tests/examples/level/
which you might want to checkout.
Stefan
------------------------------------------------------------------------------
Crystal Reports &#45; New Free Runtime and 30 Day Trial
Check out the new simplified licensign option that enables unlimited
royalty&#45;free distribution of the report engine for externally
facing server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
David Schleef
2009-04-27 02:21:19 UTC
Permalink
Post by Guilherme
Mates, just for reference
running
gst-launch audiotestsrc ! audioconvert ! wavenc ! filesink
location=/home/guilherme/Desktop/test.wav
creates a test.wav file of 18 minutes, no matter when I stop recording
and also it generates a horrible cracking sound...
Of course. Because gst-launch doesn't shut down live pipelines
correctly when you use ctrl-C. Use the -e flag (recently added
in git, also in the prereleases), or write a real application.



dave...
Guilherme
2009-04-27 09:22:04 UTC
Permalink
Oh.... tks man!
I was really stuck with that.

Tks!

-------------------

Guilherme Longo
Dept. Eng. da Computação
Unaerp

Linux User - #484927

*Before Asking
http://www.istf.com.br/?page=perguntas

!- I'd rather die on my feet than live on my knees -!
Post by David Schleef
Post by Guilherme
Mates, just for reference
running
gst-launch audiotestsrc ! audioconvert ! wavenc ! filesink
location=/home/guilherme/Desktop/test.wav
creates a test.wav file of 18 minutes, no matter when I stop recording
and also it generates a horrible cracking sound...
Of course. Because gst-launch doesn't shut down live pipelines
correctly when you use ctrl-C. Use the -e flag (recently added
in git, also in the prereleases), or write a real application.
dave...
------------------------------------------------------------------------------
Crystal Reports &#45; New Free Runtime and 30 Day Trial
Check out the new simplified licensign option that enables unlimited
royalty&#45;free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
Guilherme
2009-04-27 10:03:24 UTC
Permalink
David, just for reference, -e flag isn't working here. Dunno why!
Anyways my app is almost done.

Still I have just one doubt...

Where I can find a simple app (audio recorder) for download!

I just need to know how exacly I'm gonna create my output file, so I
wanna check it out!

Tks!

-------------------

Guilherme Longo
Dept. Eng. da Computação
Unaerp

Linux User - #484927

*Before Asking
http://www.istf.com.br/?page=perguntas

!- I'd rather die on my feet than live on my knees -!
Post by David Schleef
Post by Guilherme
Mates, just for reference
running
gst-launch audiotestsrc ! audioconvert ! wavenc ! filesink
location=/home/guilherme/Desktop/test.wav
creates a test.wav file of 18 minutes, no matter when I stop recording
and also it generates a horrible cracking sound...
Of course. Because gst-launch doesn't shut down live pipelines
correctly when you use ctrl-C. Use the -e flag (recently added
in git, also in the prereleases), or write a real application.
dave...
------------------------------------------------------------------------------
Crystal Reports &#45; New Free Runtime and 30 Day Trial
Check out the new simplified licensign option that enables unlimited
royalty&#45;free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
alavariega
2009-04-24 14:22:53 UTC
Permalink
Hi how are you?

Thanks a lot for your help, the problem was solved with your comments below.

Have a nice day and again thanks for your help and time for reply to my
mails

Best Regards,

Alberto
Post by sledge hammer
The queues should be between the demuxer and the decoder. I see that you
put them between the decoder and the audioconvert(for the audio stream).
The pipeline linkage should look like
src->demuxer->queuea->decodera->convert->resample->sinka
src->demuxer->queuev->decoderv->sinkv
Date: Thu, 23 Apr 2009 08:31:41 -0700
Subject: Re: [gst-devel] play audio and video simultaneously
The "mad" element doesn't have dynamic pads, so it doesn't emit a "pad-added"
signal. You can connect it directly to "audioconvert".
Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".
Hi Sledge, thanks for your reply!
I have already made the corresponding changes and still not activated the
signal that link the video decoder with the respective queue. when i execute
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked
There are no more errors, but they are not playing the video and audio
streams simultaneously
Thanks in advance for the help,
Best Regards,
Alberto.
#include<gst/gst.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );
GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera, *convert,
*resample, *sinka;
GstElement *queueA, *queueV;
int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&argc,&argv);
loop = g_main_loop_new(NULL, FALSE);
if(argc != 2)
{
g_print("Usage: %s <mpg/mpeg video file>", argv[0]);
return -1;
}
pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);
src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");
/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}
/*Gstreamer audio elements*/
decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}
queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");
g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
queueV,sinkv,decodera,convert, resample, sinka, NULL);
gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
// gst_element_link (queueA, convert);
gst_element_link (decodera, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);
g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
//g_signal_connect (decodera,
"pad-added",G_CALLBACK(on_decpad_added),decodera);
g_signal_connect (decoderv,
"pad-added",G_CALLBACK(on_decpad_added),decoderv);
/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;
g_print ("Failed to start up pipeline!\n");
/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}
g_main_loop_run (loop);
/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data)
{
GMainLoop *loop = data;
switch (GST_MESSAGE_TYPE (msg)) {
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}
g_main_loop_quit (loop);
break;
}
break;
}
return TRUE;
}
static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);
g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
//element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( queueV, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was
linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}
/*
 * "pad-added" handler for the demuxer: inspects the caps of the new
 * source pad and links it to the video decoder (video/image streams)
 * or the audio decoder (audio streams).
 */
static void on_pad_added (GstElement *element, GstPad *pad)
{
    GstCaps *caps;
    GstStructure *str;
    const gchar *media_type;

    g_debug ("Signal: pad-added");

    caps = gst_pad_get_caps (pad);            /* GStreamer 0.10 API */
    g_assert (caps != NULL);
    str = gst_caps_get_structure (caps, 0);
    g_assert (str != NULL);

    media_type = gst_structure_get_name (str);
    g_print ("on_pad_added: %s\n", media_type);

    /* BUG FIX: the commented-out debug g_print calls in both branches
     * were line-wrapped so their continuation lines were left uncommented,
     * producing dangling statements (compile error); dead code removed. */
    if (g_strrstr (media_type, "video") || g_strrstr (media_type, "image")) {
        GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
        g_debug ("Linking video pad to decoderv");
        g_assert (targetsink != NULL);
        gst_pad_link (pad, targetsink);
        if (gst_pad_is_linked (pad))
            g_print ("The video pad src was linked\n");
        gst_object_unref (targetsink);
    }

    if (g_strrstr (media_type, "audio")) {
        GstPad *targetsink = gst_element_get_pad (decodera, "sink");
        g_debug ("Linking audio pad to decodera");
        g_assert (targetsink != NULL);
        gst_pad_link (pad, targetsink);
        if (gst_pad_is_linked (pad))
            g_print ("The audio pad src was linked\n");
        gst_object_unref (targetsink);
    }

    gst_caps_unref (caps);
}
--
http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23197413.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
------------------------------------------------------------------------------
Stay on top of everything new and different, both inside and
around Java (TM) technology - register by April 22, and save
$200 on the JavaOne (SM) conference, June 2-5, 2009, San Francisco.
300 plus technical and hands-on sessions. Register today.
Use priority code J9JMT32. http://p.sf.net/sfu/p
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
_________________________________________________________________
Το What's New σας ειδοποιεί άμεσα για κάθε ενημέρωση. Μάθετε πώς.
http://home.live.com/
------------------------------------------------------------------------------
Crystal Reports - New Free Runtime and 30 Day Trial
Check out the new simplified licensing option that enables unlimited
royalty-free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
--
View this message in context: http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23217475.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
sledge hammer
2009-04-27 17:25:12 UTC
Permalink
I am fine. No problem. You may find a better explanation of the problem in this wiki entry: http://gstreamer.freedesktop.org/wiki/FAQ#head-9f14f8ce765b3d24c91660db5f02e3345da98a35
Date: Fri, 24 Apr 2009 07:22:53 -0700
Subject: Re: [gst-devel] play audio and video simultaneously
Hi how are you?
Thanks a lot for your help, the problem was solved with your comments below.
Have a nice day and again thanks for your help and time for reply to my
mails
Best Regards,
Alberto
Post by sledge hammer
The queues should be between the demuxer and the decoder. I see that you
put them between the decoder and the audioconvert(for the audio stream).
The pipeline linkage should look like
src->demuxer->queuea->decodera->convert->resample->sinka
src->demuxer->queuev->decoderv->sinkv
Date: Thu, 23 Apr 2009 08:31:41 -0700
Subject: Re: [gst-devel] play audio and video simultaneously
The "mad" element doesn't have dynamic pads, so it doesn't emit a "pad-added"
signal. You can connect it directly to "audioconvert".
Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".
Hi Sledge, thanks for your reply!
I have already made the corresponding changes and still not activated the
signal that link the video decoder with the respective queue. when i execute
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked
There are no more errors, but they are not playing the video and audio
streams simultaneously
Thanks in advance for the help,
Best Regards,
Alberto.
#include<gst/gst.h>
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );
GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera, *convert,
*resample, *sinka;
GstElement *queueA, *queueV;
/*
 * Entry point: builds and runs an MPEG playback pipeline:
 *
 *   filesrc -> dvddemux -+-> mpeg2dec -> queue -> xvimagesink          (video)
 *                        +-> mad -> audioconvert -> audioresample -> osssink (audio)
 *
 * The demuxer's pads are dynamic and are linked in on_pad_added();
 * everything else is linked statically before going to PLAYING.
 */
int main(int argc, char *argv[])
{
    GstStateChangeReturn ret;
    GMainLoop *loop;
    GstBus *bus;

    /* initialization */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    if (argc != 2)
    {
        g_print ("Usage: %s <mpg/mpeg video file>", argv[0]);
        return -1;
    }

    pipeline = gst_pipeline_new ("VIDEO PLAYER");
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);

    src = gst_element_factory_make ("filesrc", "filesource");
    demux = gst_element_factory_make ("dvddemux", "mpg-demux");

    /* GStreamer video elements */
    decoderv = gst_element_factory_make ("mpeg2dec", "mpeg-decoder");
    sinkv = gst_element_factory_make ("xvimagesink", "video-out");
    if (!decoderv || !sinkv)
    {
        g_print ("\nthe video could not playback\n");
        return -1;
    }

    /* GStreamer audio elements */
    decodera = gst_element_factory_make ("mad", "decoder-audio");
    convert = gst_element_factory_make ("audioconvert", "a-convert");
    resample = gst_element_factory_make ("audioresample", "a-resample");
    sinka = gst_element_factory_make ("osssink", "play audio");
    if (!decodera || !convert || !resample || !sinka)
    {
        g_print ("\nthe audio could not playback\n");
        return -1;
    }

    /* NOTE(review): queueA is created but never added to the pipeline or
     * linked; per this thread the queues belong between demuxer and
     * decoders for both streams -- confirm intended topology. */
    queueA = gst_element_factory_make ("queue", "queue-audio");
    queueV = gst_element_factory_make ("queue", "queue-video");

    g_object_set (G_OBJECT (src), "location", argv[1], NULL);
    gst_bin_add_many (GST_BIN (pipeline), src, demux, decoderv,
        queueV, sinkv, decodera, convert, resample, sinka, NULL);

    gst_element_link (src, demux);
    /* BUG FIX: mpeg2dec's src pad is "Always" (not dynamic), so the
     * "pad-added" signal connected below never fires for it; the decoder
     * must be linked to the video queue statically or video never flows. */
    gst_element_link (decoderv, queueV);
    gst_element_link (queueV, sinkv);
    gst_element_link (decodera, convert);
    gst_element_link (convert, resample);
    gst_element_link (resample, sinka);

    /* demuxer pads appear dynamically once the stream types are known */
    g_signal_connect (demux, "pad-added", G_CALLBACK (on_pad_added), NULL);
    /* BUG FIX: a commented-out g_signal_connect for decodera was wrapped
     * so its argument list remained as a live dangling statement (compile
     * error); the dead code has been removed. */
    g_signal_connect (decoderv, "pad-added",
        G_CALLBACK (on_decpad_added), decoderv);

    /* run */
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        GstMessage *msg;

        g_print ("Failed to start up pipeline!\n");

        /* check if there is an error message with details on the bus */
        msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
        if (msg) {
            GError *err = NULL;
            g_print ("\nDISPLAY ERROR:\n");
            gst_message_parse_error (msg, &err, NULL);
            g_print ("ERROR: %s\n", err->message);
            g_error_free (err);
            gst_message_unref (msg);
        }
        return -1;
    }

    g_main_loop_run (loop);

    /* clean up */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (bus);        /* BUG FIX: bus reference was leaked */
    gst_object_unref (pipeline);

    return 0;
}
/*
 * GstBus watch callback: quits the main loop on end-of-stream or error,
 * printing error details when available.  Returning TRUE keeps the
 * watch installed.  `data` is the GMainLoop passed to gst_bus_add_watch.
 */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = data;

    switch (GST_MESSAGE_TYPE (msg)) {
    /* BUG FIX: the EOS statements below had lost their case label, making
     * them unreachable; restored per the intended handler (the later copy
     * of this program in the thread has the label). */
    case GST_MESSAGE_EOS:
        g_print ("End-of-stream\n");
        g_main_loop_quit (loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug = NULL;
        GError *err = NULL;

        gst_message_parse_error (msg, &err, &debug);
        g_print ("Error: %s\n", err->message);
        g_error_free (err);

        if (debug) {
            g_print ("Debug details: %s\n", debug);
            g_free (debug);
        }

        g_main_loop_quit (loop);
        break;
    }
    default:
        break;
    }

    return TRUE;
}
/*
 * "pad-added" handler intended for the video decoder: links the new
 * source pad to the video queue's sink pad.
 *
 * NOTE(review): mpeg2dec's src pad is "Always" (see gst-inspect output
 * quoted later in this thread), so this handler never runs for it; link
 * decoderv to queueV statically instead.
 */
static void on_decpad_added(GstElement *element, GstPad *pad )
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *targetsink;

    g_debug ("Signal: decoder pad-added");

    caps = gst_pad_get_caps (pad);            /* GStreamer 0.10 API */
    g_assert (caps != NULL);
    str = gst_caps_get_structure (caps, 0);
    g_assert (str != NULL);

    g_debug ("Linking decoder to QUEUE pad ");
    targetsink = gst_element_get_pad (queueV, "sink");
    g_assert (targetsink != NULL);
    gst_pad_link (pad, targetsink);

    if (gst_pad_is_linked (pad)) {
        g_print ("The decoder pad src was linked\n");
    } else {
        /* BUG FIX: the original literal was split across two source lines
         * (compile error) and wrongly reported success on failure. */
        gchar *padname = gst_pad_get_name (pad);  /* 0.10: caller frees */
        g_print ("The stream:%s pad src was NOT linked\n", padname);
        g_free (padname);                         /* BUG FIX: was leaked */
    }

    gst_object_unref (targetsink);
    gst_caps_unref (caps);
}
/*
 * "pad-added" handler for the demuxer: routes the new source pad to the
 * video decoder (video/image caps) or the audio decoder (audio caps).
 */
static void on_pad_added (GstElement *element, GstPad *pad)
{
    GstCaps *caps;
    GstStructure *str;
    const gchar *media_type;

    g_debug ("Signal: pad-added");

    caps = gst_pad_get_caps (pad);            /* GStreamer 0.10 API */
    g_assert (caps != NULL);
    str = gst_caps_get_structure (caps, 0);
    g_assert (str != NULL);

    media_type = gst_structure_get_name (str);
    g_print ("on_pad_added: %s\n", media_type);

    /* BUG FIX: the commented-out debug g_print calls in both branches were
     * line-wrapped so their continuation lines were left uncommented,
     * producing dangling statements (compile error); dead code removed. */
    if (g_strrstr (media_type, "video") || g_strrstr (media_type, "image")) {
        GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
        g_debug ("Linking video pad to decoderv");
        g_assert (targetsink != NULL);
        gst_pad_link (pad, targetsink);
        if (gst_pad_is_linked (pad))
            g_print ("The video pad src was linked\n");
        gst_object_unref (targetsink);
    }

    if (g_strrstr (media_type, "audio")) {
        GstPad *targetsink = gst_element_get_pad (decodera, "sink");
        g_debug ("Linking audio pad to decodera");
        g_assert (targetsink != NULL);
        gst_pad_link (pad, targetsink);
        if (gst_pad_is_linked (pad))
            g_print ("The audio pad src was linked\n");
        gst_object_unref (targetsink);
    }

    gst_caps_unref (caps);
}
--
http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23197413.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
------------------------------------------------------------------------------
Stay on top of everything new and different, both inside and
around Java (TM) technology - register by April 22, and save
$200 on the JavaOne (SM) conference, June 2-5, 2009, San Francisco.
300 plus technical and hands-on sessions. Register today.
Use priority code J9JMT32. http://p.sf.net/sfu/p
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
_________________________________________________________________
Το What's New σας ειδοποιεί άμεσα για κάθε ενημέρωση. Μάθετε πώς.
http://home.live.com/
------------------------------------------------------------------------------
Crystal Reports - New Free Runtime and 30 Day Trial
Check out the new simplified licensing option that enables unlimited
royalty-free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
--
View this message in context: http://www.nabble.com/play-audio-and-video-simultaneously-tp23175849p23217475.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
------------------------------------------------------------------------------
Crystal Reports - New Free Runtime and 30 Day Trial
Check out the new simplified licensing option that enables unlimited
royalty-free distribution of the report engine for externally facing
server and web deployment.
http://p.sf.net/sfu/businessobjects
_______________________________________________
gstreamer-devel mailing list
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
_________________________________________________________________
Σύρετε φωτογραφίες στο παράθυρο του Messenger. Δείτε πώς.
http://download.live.com/messenger

Andrey Boyko
2009-04-24 06:36:47 UTC
Permalink
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta content="text/html;charset=ISO-8859-1" http-equiv="Content-Type">
</head>
<body bgcolor="#ffffff" text="#000000">
mpeg2dec element also has no dynamic pads:<br>
gst-inspect mpeg2dec<br>
...<br>
Pad Templates:<br>
&nbsp; SRC template: 'src'<br>
&nbsp;&nbsp;&nbsp; Availability: <b><i>Always</i></b><br>
&nbsp;&nbsp;&nbsp; Capabilities:<br>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; video/x-raw-yuv<br>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; format: { YV12, I420, Y42B }<br>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; width: [ 16, 4096 ]<br>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; height: [ 16, 4096 ]<br>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; framerate: [ 0/1, 2147483647/1 ]<br>
...<br>
Also it is make sense to have queues between demux and decoders for
both video and audio.<br>
<br>
alavariega wrote:
<blockquote cite="mid:***@talk.nabble.com" type="cite">
<pre wrap="">The "mad" element doesn't have dynamic pads, so it doesn't emit a "pad-added"
signal. You can connect it directly to "audioconvert".

Also I see that you declare "queueV" as a "GstElement**" and not as a
"GstElement*".


Hi Sledge, thanks for your reply!

I have already made the corresponding changes and still not activated the
signal that link the video decoder with the respective queue. when i execute
the application this is the result:

** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: audio/mpeg
** (testvideop:6043): DEBUG: Linking audio pad to decodera
The audio pad src was linked
** (testvideop:6043): DEBUG: Signal: pad-added
on_pad_added: video/mpeg
** (testvideop:6043): DEBUG: Linking video pad to decoderv
The video pad src was linked

There are no more errors, but they are not playing the video and audio
streams simultaneously

Thanks in advance for the help,

Best Regards,

Alberto.



#include&lt;gst/gst.h&gt;

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data);
static void on_pad_added (GstElement *element, GstPad *pad);
static void on_decpad_added(GstElement *element, GstPad *pad );

GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera, *convert,
*resample, *sinka;
GstElement *queueA, *queueV;

int main(int argc, char *argv[])
{
GstStateChangeReturn ret;
GMainLoop *loop;
GstBus *bus;
/*initialization*/
gst_init(&amp;argc,&amp;argv);
loop = g_main_loop_new(NULL, FALSE);

if(argc != 2)
{
g_print("Usage: %s &lt;mpg/mpeg video file&gt;", argv[0]);
return -1;
}

pipeline = gst_pipeline_new("VIDEO PLAYER");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);

src = gst_element_factory_make("filesrc", "filesource");
demux = gst_element_factory_make("dvddemux", "mpg-demux");

/*Gstreamer video elements*/
decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
sinkv = gst_element_factory_make("xvimagesink", "video-out");
if(!decoderv || !sinkv)
{
g_print("\nthe video could not playback\n");
return -1;
}

/*Gstreamer audio elements*/

decodera = gst_element_factory_make("mad", "decoder-audio");
convert = gst_element_factory_make("audioconvert", "a-convert");
resample = gst_element_factory_make("audioresample", "a-resample");
sinka = gst_element_factory_make("osssink", "play audio");
if(!decodera || !convert || !resample || !sinka)
{
g_print("\nthe audio could not playback\n");
return -1;
}

queueA = gst_element_factory_make("queue", "queue-audio");
queueV = gst_element_factory_make("queue", "queue-video");

g_object_set (G_OBJECT (src), "location", argv[1], NULL);
gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
queueV,sinkv,decodera,convert, resample, sinka, NULL);

gst_element_link (src, demux);
gst_element_link (queueV, sinkv);
// gst_element_link (queueA, convert);
gst_element_link (decodera, convert);
gst_element_link (convert, resample);
gst_element_link (resample, sinka);

g_signal_connect (demux, "pad-added",G_CALLBACK (on_pad_added), NULL);
//g_signal_connect (decodera,
"pad-added",G_CALLBACK(on_decpad_added),decodera);
g_signal_connect (decoderv,
"pad-added",G_CALLBACK(on_decpad_added),decoderv);

/* run */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);

if (ret == GST_STATE_CHANGE_FAILURE)
{
GstMessage *msg;

g_print ("Failed to start up pipeline!\n");

/* check if there is an error message with details on the bus */
msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg) {
GError *err = NULL;
g_print("\nDISPLAY ERROR:\n");
gst_message_parse_error (msg, &amp;err, NULL);
g_print ("ERROR: %s\n", err-&gt;message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}

g_main_loop_run (loop);

/* clean up */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);

return 0;

}


static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
data)
{
GMainLoop *loop = data;

switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End-of-stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug = NULL;
GError *err = NULL;

gst_message_parse_error (msg, &amp;err, &amp;debug);

g_print ("Error: %s\n", err-&gt;message);
g_error_free (err);

if (debug) {
g_print ("Debug details: %s\n", debug);
g_free (debug);
}

g_main_loop_quit (loop);
break;
}
default:
break;
}

return TRUE;
}


static void on_decpad_added(GstElement *element, GstPad *pad )
{
g_debug ("Signal: decoder pad-added");
GstCaps *caps;
GstStructure *str;
GstPad *targetsink;
caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

g_debug ("Linking decoder to QUEUE pad ");
// Link it actually
//element = (element == decodera ? queueA : queueV);
targetsink = gst_element_get_pad ( queueV, "sink");
g_assert (targetsink != NULL);
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The decoder pad src was linked\n");
else
g_print("The stream:%s pad src was
linked\n",gst_pad_get_name(pad));
gst_object_unref (targetsink);
gst_caps_unref (caps);
}

static void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;

caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

const gchar *c = gst_structure_get_name(str);
g_print("on_pad_added: %s\n",c);
if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
g_debug ("Linking video pad to decoderv");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
g_assert (targetsink != NULL);
//g_print("\nVIDEO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nVIDEO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The video pad src was linked\n");
gst_object_unref (targetsink);
}

if (g_strrstr (c, "audio")) {
g_debug ("Linking audio pad to decodera");
// Link it actually
GstPad *targetsink = gst_element_get_pad (decodera, "sink");
g_assert (targetsink != NULL);
//g_print("\nAUDIO--pad source: %s\n",
gst_pad_get_name(pad));
//g_print("\nAUDIO--pad sink: %s\n",
gst_pad_get_name(targetsink));
gst_pad_link (pad, targetsink);
if (gst_pad_is_linked(pad))
g_print("The audio pad src was linked\n");
gst_object_unref (targetsink);
}

gst_caps_unref (caps);
}




</pre>
</blockquote>
<br>
</body>
</html>
Continue reading on narkive:
Loading...