- amrwbdec — Adaptive Multi-Rate Wideband audio decoder
- amrwbenc — Adaptive Multi-Rate Wideband audio encoder
- amrwbparse — Adaptive Multi-Rate Wideband audio parser
- audioparse — parses a byte stream into audio frames
- dfbvideosink — A DirectFB based videosink (an example application follows this list)
- dvbsrc — Digital Video Broadcast Source
- dvdspu — Parses the DVD Sub-Picture command stream and renders the SPU overlay onto the video as it passes through
- festival — Synthesizes plain text into audio
- gstrtpbin — handle media from one RTP bin
- gstrtpclient — handle media from one RTP client
- gstrtpjitterbuffer — buffer, reorder and remove duplicate RTP packets to compensate for network oddities
- gstrtpptdemux — separate RTP payloads based on the payload type
- gstrtpsession — an RTP session manager
- gstrtpssrcdemux — separate RTP payloads based on the SSRC
- input-selector — N-to-1 stream selection
- ivorbisdec — a decoder that decodes Vorbis to raw audio
- jackaudiosink — JACK audio sink
- metadatademux — parses or demuxes metadata from image files
- metadatamux — Write metadata (EXIF, IPTC and XMP) into an image stream
- modplug — Module decoder based on the modplug engine
- nuvdemux — Demultiplex a MythTV .nuv file into audio and video
- output-selector — 1-to-N stream selection
- rganalysis — Perform the ReplayGain analysis
- rglimiter — Apply signal compression to raw audio data
- rgvolume — Apply ReplayGain volume adjustment (see the sketch after this list)
- sdlaudiosink — Output to a sound card via SDL audio
- sdlvideosink — An SDL-based videosink
- sdpdemux — Receive data over the network via SDP
- speed — Set speed/pitch on raw audio streams (resampler)
- speexresample — Resamples audio
- timidity — MIDI synthesizer element
- trm — Compute the MusicBrainz TRM ID using libmusicbrainz
- videoanalyse — Analyse and report on video frames
- videodetect — Detect a pattern in a video signal
- videomark — Mark a pattern in a video signal
- videoparse — parses a byte stream into video frames
- wildmidi — MIDI synthesizer element
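Many of these elements can simply be dropped into a pipeline description string. The following minimal sketch is only illustrative: it assumes GStreamer development headers are installed, and it feeds a synthetic test tone through rgvolume via gst_parse_launch(); the pipeline string and element choice are not taken from the documentation above.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* A synthetic source keeps the sketch self-contained; with real files a
   * decodebin would normally sit in front of the ReplayGain element. */
  pipeline = gst_parse_launch ("audiotestsrc num-buffers=500 ! audioconvert ! "
      "rgvolume ! audioconvert ! autoaudiosink", &error);
  if (pipeline == NULL) {
    g_printerr ("Failed to build pipeline: %s\n", error->message);
    g_error_free (error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until the stream ends or an error is posted on the bus */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

The dfbvideosink element needs a little more application code, because it renders into a DirectFB surface provided by the application. The example below creates a full-screen, double-buffered primary surface and hands it to dfbvideosink through its "surface" property.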
#include <stdio.h>
#include <directfb.h>
#include <gst/gst.h>
static IDirectFB *dfb = NULL;
static IDirectFBSurface *primary = NULL;
static GMainLoop *loop;
#define DFBCHECK(x...) \
  { \
    DFBResult err = x; \
    \
    if (err != DFB_OK) \
      { \
        fprintf (stderr, "%s <%d>:\n\t", __FILE__, __LINE__); \
        DirectFBErrorFatal (#x, err); \
      } \
  }

static gboolean
get_me_out (gpointer data)
{
  g_main_loop_quit (loop);
  return FALSE;
}
int
main (int argc, char *argv[])
{
  DFBSurfaceDescription dsc;
  GstElement *pipeline, *src, *sink;

  /* Init both GStreamer and DirectFB */
  DFBCHECK (DirectFBInit (&argc, &argv));
  gst_init (&argc, &argv);

  /* Create the DirectFB main context and set it to fullscreen layout */
  DFBCHECK (DirectFBCreate (&dfb));
  DFBCHECK (dfb->SetCooperativeLevel (dfb, DFSCL_FULLSCREEN));

  /* We want a double buffered primary surface */
  dsc.flags = DSDESC_CAPS;
  dsc.caps = DSCAPS_PRIMARY | DSCAPS_FLIPPING;
  DFBCHECK (dfb->CreateSurface (dfb, &dsc, &primary));

  /* Creating our pipeline : videotestsrc ! dfbvideosink */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);
  src = gst_element_factory_make ("videotestsrc", NULL);
  g_assert (src);
  sink = gst_element_factory_make ("dfbvideosink", NULL);
  g_assert (sink);

  /* That's the interesting part, giving the primary surface to dfbvideosink */
  g_object_set (sink, "surface", primary, NULL);

  /* Adding elements to the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  if (!gst_element_link (src, sink))
    g_error ("Could not link videotestsrc to dfbvideosink");

  /* Let's play! */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* We need to run a GLib main loop to get out of here */
  loop = g_main_loop_new (NULL, FALSE);
  /* Get us out after 20 seconds */
  g_timeout_add (20000, get_me_out, NULL);
  g_main_loop_run (loop);

  /* Stop playback and release the pipeline */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  /* Free the main loop */
  g_main_loop_unref (loop);

  /* Release DirectFB context and surface */
  primary->Release (primary);
  dfb->Release (dfb);

  return 0;
}
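To build the example, save it under a name of your choosing (dfb-example.c here is arbitrary) and compile it against the GStreamer and DirectFB development packages; assuming their pkg-config files are installed, something along these lines should work, with the GStreamer version adjusted to whatever is on the system:

gcc -Wall dfb-example.c -o dfb-example $(pkg-config --cflags --libs gstreamer-1.0 directfb)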