GStreamer
GStreamer is a low-level multimedia framework for Linux (like a libre version of QuickTime or DirectShow).
GStreamer can be tested, triggered and generally hacked on from the command line using the gst-launch and gst-inspect programs.
GStreamer can also be driven from scripting languages; for instance, there are Python bindings (pygst) which are used throughout these notes (see the import sketch after the links).
- http://wiki.oz9aec.net/index.php/Gstreamer_cheat_sheet
- http://www.twm-kd.com/linux/webcam-and-linux-gstreamer-tutorial/
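The usual incantation to get the 0.10 bindings into Python looks like this (a minimal sketch; the examples below simply do "import gst", which also works as long as only one GStreamer version is installed):
import pygst
pygst.require("0.10")  # select the 0.10 bindings before importing gst
import gst, gobject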
Building a Webcam Recorder in Python (working notes)
Starting from the "Hello World" of webcam testing on the command line (NB: I am using an external webcam, not the built-in one, so the device is /dev/video1 in my case; to use the default/built-in webcam, use /dev/video0 or just skip the "device=/blah" part):
gst-launch-0.10 v4l2src device=/dev/video1 ! xvimagesink
Now the same thing in Python, using parse_launch, which does what the gst-launch command does and is the simplest way to get a pipeline going:
import gst, gobject
mainloop = gobject.MainLoop()
pipeline = gst.parse_launch('v4l2src device=/dev/video1 ! xvimagesink')
pipeline.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    pipeline.set_state(gst.STATE_NULL)
Instead of using parse_launch, a pipeline can also be built manually, piece by piece, in Python. Though it seems (initially) more complicated, there are advantages: connecting "multi-pad" elements like tees and muxes is arguably clearer, you get more precise control over how the pipeline is assembled, and you can keep references to specific elements so you can later send events to them or receive messages from them (see the bus-watch sketch after the code below).
import gst, gobject
mainloop = gobject.MainLoop()
# original pipeline: v4l2src device=/dev/video1 ! xvimagesink
pipeline = gst.Pipeline()
v4l2src = gst.element_factory_make("v4l2src")
v4l2src.set_property("device", "/dev/video1")
xvimagesink = gst.element_factory_make("xvimagesink")
pipeline.add(v4l2src, xvimagesink)
gst.element_link_many(v4l2src, xvimagesink)
pipeline.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    pipeline.set_state(gst.STATE_NULL)
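To actually receive messages from the pipeline (errors, end-of-stream and so on), a watch can be attached to its bus; the callback then runs inside the GLib main loop. A minimal sketch, assuming the same pygst 0.10 bindings and the /dev/video1 webcam as above:
import gst, gobject
mainloop = gobject.MainLoop()
pipeline = gst.parse_launch('v4l2src device=/dev/video1 ! xvimagesink')
def on_message(bus, message):
    # called from the main loop for every message the pipeline posts
    if message.type == gst.MESSAGE_ERROR:
        err, debug = message.parse_error()
        print "error:", err, debug
        mainloop.quit()
    elif message.type == gst.MESSAGE_EOS:
        print "end of stream"
        mainloop.quit()
bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message", on_message)
pipeline.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    pass
pipeline.set_state(gst.STATE_NULL)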
This pipeline uses caps to set the frame size and rate:
gst-launch-0.10 v4l2src device=/dev/video1 ! 'video/x-raw-yuv,width=640,height=480,framerate=30/1' ! xvimagesink
In code:
import gst, gobject
mainloop = gobject.MainLoop()
# original pipeline:
# v4l2src device=/dev/video1 ! 'video/x-raw-yuv,width=640,height=480,framerate=30/1' ! xvimagesink
pipeline = gst.Pipeline()
v4l2src = gst.element_factory_make("v4l2src")
v4l2src.set_property("device", "/dev/video1")
caps = gst.Caps("video/x-raw-yuv,width=640,height=480,framerate=30/1")
capsfilter = gst.element_factory_make("capsfilter", "filter")
capsfilter.set_property("caps", caps)
xvimagesink = gst.element_factory_make("xvimagesink")
pipeline.add(v4l2src, capsfilter, xvimagesink)
gst.element_link_many(v4l2src, capsfilter, xvimagesink)
pipeline.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    pipeline.set_state(gst.STATE_NULL)
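To check which caps the camera actually negotiated, the source pad can be asked once data is flowing. A small sketch, assuming pad.get_negotiated_caps() is available in this pygst version (the print_caps name is just for illustration):
import gst, gobject
mainloop = gobject.MainLoop()
pipeline = gst.parse_launch('v4l2src device=/dev/video1 name=src ! video/x-raw-yuv,width=640,height=480,framerate=30/1 ! xvimagesink')
src = pipeline.get_by_name("src")
def print_caps():
    # once data is flowing, the pad carries the caps that were actually negotiated
    caps = src.get_pad("src").get_negotiated_caps()
    if caps:
        structure = caps[0]
        print structure.get_name(), structure["width"], structure["height"], structure["framerate"]
    return False # run only once
pipeline.set_state(gst.STATE_PLAYING)
gobject.timeout_add(1000, print_caps) # give negotiation a second to happen
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    pipeline.set_state(gst.STATE_NULL)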
Now a more sophisticated pipeline that actually records the webcam, together with audio, to a "raw" AVI file (a big file, but less taxing on the computer, for better quality).
gst-launch-0.10 -e v4l2src device=/dev/video1 ! 'video/x-raw-yuv,width=640,height=480,framerate=30/1'\
 ! tee name=t_vid ! queue ! xvimagesink sync=false\
 t_vid. ! queue ! videorate ! video/x-raw-yuv,framerate=30/1 ! queue ! mux.\
 pulsesrc ! audio/x-raw-int,rate=48000,channels=1,depth=16 ! queue ! audioconvert ! queue ! mux.\
 avimux name=mux ! filesink location=capture.avi
Translated to code:
import gst, gobject
mainloop = gobject.MainLoop()
# original pipeline:
"""
gst-launch-0.10 -e v4l2src device=/dev/video1 ! 'video/x-raw-yuv,width=640,height=480,framerate=30/1'\
! tee name=t_vid ! queue ! xvimagesink sync=false\
t_vid. ! queue ! videorate ! video/x-raw-yuv,framerate=30/1 ! queue ! mux.\
pulsesrc ! audio/x-raw-int,rate=48000,channels=1,depth=16 ! queue ! audioconvert ! queue ! mux.\
avimux name=mux ! filesink location=capture.avi
"""
# small helper: create an element and (optionally) add it to a pipeline
def make (factory, pipeline=None, name=None):
    elt = gst.element_factory_make(factory, name)
    if pipeline: pipeline.add(elt)
    return elt
link = gst.element_link_many
p = gst.Pipeline()
v4l2src = make("v4l2src", p)
v4l2src.set_property("device", "/dev/video1")
caps = gst.Caps("video/x-raw-yuv,width=640,height=480,framerate=30/1")
capsfilter = make("capsfilter", p)
capsfilter.set_property("caps", caps)
capsfilter2 = make("capsfilter", p)
capsfilter2.set_property("caps", caps)
tee = make("tee", p)
xvimagesink = make("xvimagesink", p)
xvimagesink.set_property("sync", "false")
videorate = make("videorate", p)
pulsesrc = make("pulsesrc", p)
audiocaps = gst.Caps("audio/x-raw-int,rate=48000,channels=1,depth=16")
audiocapsfilter = make("capsfilter", p)
audiocapsfilter.set_property("caps", audiocaps)
audioconvert = make("audioconvert", p)
mux = make("avimux", p)
filesink = make("filesink", p)
filesink.set_property("location", "capture.avi")
q = []
for i in range(5):
q.append(make("queue", p))
link(v4l2src, capsfilter, tee, q[0], xvimagesink)
link(tee, q[1], videorate, capsfilter2, q[2], mux)
link(pulsesrc, audiocapsfilter, q[3], audioconvert, q[4], mux)
link(mux, filesink)
p.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    p.set_state(gst.STATE_NULL)
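One thing the gst-launch line does that this translation does not: the -e flag makes gst-launch send an EOS event on shutdown, so avimux can write its index and the AVI file ends up playable. A sketch of the same idea in Python, using parse_launch for brevity and waiting for the EOS to reach the end of the pipeline before going to NULL (assuming bus.poll is available in this pygst version):
import gst, gobject
mainloop = gobject.MainLoop()
p = gst.parse_launch(
    "v4l2src device=/dev/video1 ! video/x-raw-yuv,width=640,height=480,framerate=30/1 "
    "! tee name=t_vid ! queue ! xvimagesink sync=false "
    "t_vid. ! queue ! videorate ! video/x-raw-yuv,framerate=30/1 ! queue ! mux. "
    "pulsesrc ! audio/x-raw-int,rate=48000,channels=1,depth=16 ! queue ! audioconvert ! queue ! mux. "
    "avimux name=mux ! filesink location=capture.avi")
p.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping, sending EOS so avimux can finish the file"
    p.send_event(gst.event_new_eos())
    # wait (at most ten seconds) for the EOS to come out the other end
    p.get_bus().poll(gst.MESSAGE_EOS, 10 * gst.SECOND)
    p.set_state(gst.STATE_NULL)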
Example of using a thread to do other things simultaneously:
import gst, gobject
mainloop = gobject.MainLoop()
# original pipeline:
"""
gst-launch-0.10 -e v4l2src device=/dev/video1 ! 'video/x-raw-yuv,width=640,height=480,framerate=30/1'\
! tee name=t_vid ! queue ! xvimagesink sync=false\
t_vid. ! queue ! videorate ! video/x-raw-yuv,framerate=30/1 ! queue ! mux.\
pulsesrc ! audio/x-raw-int,rate=48000,channels=1,depth=16 ! queue ! audioconvert ! queue ! mux.\
avimux name=mux ! filesink location=capture.avi
"""
def make (factory, pipeline=None, name=None):
    elt = gst.element_factory_make(factory, name)
    if pipeline: pipeline.add(elt)
    return elt
link = gst.element_link_many
p = gst.Pipeline()
v4l2src = make("v4l2src", p)
v4l2src.set_property("device", "/dev/video1")
videocaps = gst.Caps("video/x-raw-yuv,width=640,height=480,framerate=30/1")
videocapsfilter = make("capsfilter", p)
videocapsfilter.set_property("caps", videocaps)
videocapsfilter2 = make("capsfilter", p)
videocapsfilter2.set_property("caps", videocaps)
tee = make("tee", p)
xvimagesink = make("xvimagesink", p)
xvimagesink.set_property("sync", "false")
videorate = make("videorate", p)
pulsesrc = make("pulsesrc", p)
audiocaps = gst.Caps("audio/x-raw-int,rate=48000,channels=1,depth=16")
audiocapsfilter = make("capsfilter", p)
audiocapsfilter.set_property("caps", audiocaps)
audioconvert = make("audioconvert", p)
mux = make("avimux", p)
filesink = make("filesink", p)
filesink.set_property("location", "capture.avi")
q = []
for i in range(5):
q.append(make("queue", p))
link(v4l2src, videocapsfilter, tee, q[0], xvimagesink)
link(tee, q[1], videorate, videocapsfilter2, q[2], mux)
link(pulsesrc, audiocapsfilter, q[3], audioconvert, q[4], mux)
link(mux, filesink)
p.set_state(gst.STATE_PLAYING)
from time import sleep
def stuff ():
    while True:
        print "I am alive"
        sleep(1)
import thread
s = thread.start_new_thread(stuff, ())
try:
    gobject.threads_init() # necessary to allow the Python thread to run alongside the main loop
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    print s
    p.set_state(gst.STATE_NULL)
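A thread works, but the more GLib-ish way to do periodic work next to a running pipeline is gobject.timeout_add, which calls a function from inside the main loop itself, so no extra thread and no threads_init are needed. A minimal sketch:
import gst, gobject
mainloop = gobject.MainLoop()
pipeline = gst.parse_launch('v4l2src device=/dev/video1 ! xvimagesink')
def stuff():
    print "I am alive"
    return True # returning True keeps the timeout scheduled
gobject.timeout_add(1000, stuff) # call stuff() every 1000 ms from the main loop
pipeline.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    pipeline.set_state(gst.STATE_NULL)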
Icecast
Streaming the webcam to an Icecast server as Ogg Theora/Vorbis with shout2send:
gst-launch-0.10 v4l2src ! 'video/x-raw-yuv,width=320,height=240,framerate=30/1' ! queue ! ffmpegcolorspace ! theoraenc quality=1 ! queue ! oggmux name=mux alsasrc ! audio/x-raw-int,rate=8000,channels=1,depth=8 ! queue ! audioconvert ! vorbisenc ! queue ! mux. mux. ! queue ! shout2send ip=constantvzw.org port=8000 password=hackme mount=/stream.ogg
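The same streaming pipeline can be driven from Python with parse_launch, exactly as in the webcam examples above (server, port, password and mount point are the placeholders from the command line):
import gst, gobject
mainloop = gobject.MainLoop()
pipeline = gst.parse_launch(
    "v4l2src ! video/x-raw-yuv,width=320,height=240,framerate=30/1 "
    "! queue ! ffmpegcolorspace ! theoraenc quality=1 ! queue ! oggmux name=mux "
    "alsasrc ! audio/x-raw-int,rate=8000,channels=1,depth=8 "
    "! queue ! audioconvert ! vorbisenc ! queue ! mux. "
    "mux. ! queue ! shout2send ip=constantvzw.org port=8000 password=hackme mount=/stream.ogg")
pipeline.set_state(gst.STATE_PLAYING)
try:
    mainloop.run()
except: # an interruption from Ctrl-C
    print "stopping"
    pipeline.set_state(gst.STATE_NULL)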