Creating procedural MPEG-4 video with pygst
I hope to find out how to create an MPEG-4 video file with pygst (other frameworks can be suggested).
The question has three parts
How to feed generated video data in pygst pipeline from Python frame buffer
How to save this stream to MPEG-4 file
How to mix this stream with MP3 audio source
Pseudo-code below:
for frame in range(0, 10000): # let's render 10000 frames of video
data = []
for y in range(0, height):
for x in range(0, width):
data[y*width+x] = random.randint(0, 2**31) # rgba pixel
# XXX: how to feed the video frame generated above into the GStreamer pipeline
# and save it to an MPEG-4 file
pass
More info:
http://lists.freedesktop.org/archives/gstreamer-devel/2011-August/032609.html
You would probably want to create an appsrc
element, then for each frame create a new GstBuffer and push it into the pipeline with gst_app_src_push_buffer().
This is some example code. It does not work — ffmpeg complains about the frame length — but I think you get the point, and some hints.
import os
# NOTE(review): GST_DEBUG_DUMP_DOT_DIR must be set before gst is imported
# so DEBUG_BIN_TO_DOT_FILE() picks up the dump directory — confirm.
os.putenv('GST_DEBUG_DUMP_DOT_DIR', '/tmp')
import gst
import gobject
# Initialize GLib threading before any GStreamer activity so that signal
# callbacks may safely fire from streaming threads.
gobject.threads_init()
import logging
import random
import pdb
# Module-level logger for this script.
_log = logging.getLogger(__name__)
_log.setLevel(logging.DEBUG)
logging.basicConfig()
def framegenerator(height=1080, width=1920, num_frames=10000):
    '''
    Yield one frame of procedurally generated video per iteration.

    Each frame is a flat list of height*width random 32-bit integers,
    one entry per pixel (interpreted downstream as an RGBA value).
    The defaults reproduce the original behavior: 10000 frames of
    1920x1080 video.

    :param height: frame height in pixels
    :param width: frame width in pixels
    :param num_frames: number of frames to yield before exhaustion
    :returns: generator yielding one flat pixel list per frame
    '''
    pixels = height * width
    for _ in range(num_frames):
        # Build each frame in a single comprehension rather than
        # pre-allocating list(range(h*w)) and overwriting every element
        # through nested y/x loops — same result, less wasted work.
        yield [random.randint(0, 2**31) for _ in range(pixels)]
GENERATOR = framegenerator()
def feed_appsrc(bus, message):
    '''
    Feed the appsrc element with the next frame of video.

    Connected to appsrc's "need-data" signal.  Pulls one frame from the
    module-level GENERATOR, packs it into raw bytes, wraps it in a
    gst.Buffer and pushes it into the pipeline.  When the generator is
    exhausted, emits EOS so downstream elements can finalize the file.
    '''
    # Local import keeps the module's order-sensitive import block
    # (putenv before gst, threads_init before use) untouched.
    import struct
    global appsrc, pipeline
    # Dump the pipeline graph for debugging.  NOTE(review): this runs on
    # EVERY frame and shells out to dot — extremely slow; consider doing
    # it once, outside the feed callback.
    dotfile = "/tmp/debug-graph.dot"
    pngfile = "/tmp/pipeline.png"
    if os.access(dotfile, os.F_OK):
        os.remove(dotfile)
    if os.access(pngfile, os.F_OK):
        os.remove(pngfile)
    gst.DEBUG_BIN_TO_DOT_FILE(
        pipeline,
        gst.DEBUG_GRAPH_SHOW_ALL,
        'debug-graph')
    dot = '/usr/bin/dot'
    os.system(dot + " -Tpng -o " + pngfile + " " + dotfile)
    try:
        frame = GENERATOR.next()
        # BUG FIX: the original did str(frame), which pushes the textual
        # repr of a Python list ("[123, 456, ...]") instead of raw pixel
        # data — hence the "frame length" complaints downstream.  Pack
        # each pixel as a 32-bit big-endian unsigned int to match the
        # endianness=4321 declared in the appsrc caps.
        buf = gst.Buffer(struct.pack('>%dI' % len(frame), *frame))
        buf.set_caps(
            gst.caps_from_string('video/x-raw-rgb,framerate=30/1'))
        res = appsrc.emit('push-buffer', buf)
        _log.debug('Result: {0}'.format(res))
    except StopIteration:
        # No more frames: signal end-of-stream so the muxer finalizes.
        res = appsrc.emit('eos')
        _log.info('EOS')
def _on_message(bus, message):
    '''Debug-log every message that arrives on the pipeline bus.'''
    _log.debug(message)
# --- Pipeline construction (legacy pygst / GStreamer 0.10) ---
pipeline = gst.Pipeline('pipeline')
# appsrc lets application code push buffers into the pipeline.
appsrc = gst.element_factory_make('appsrc', 'appsrc')
# Connect feed_appsrc to the need-data signal
appsrc.connect('need-data', feed_appsrc)
# Raw RGB(A) caps describing the pushed buffers.  NOTE(review): the
# red/green/blue/alpha masks combined with endianness=4321 must match the
# byte order of the actual pixel data pushed in feed_appsrc — confirm.
appsrc.set_property('caps',
gst.caps_from_string(','.join([
'video/x-raw-rgb',
'framerate=30/1',
'width=1920',
'height=1080',
'bpp=32',
'depth=32',
'green_mask=65280', #{0}'.format(0x00ff0000),
'red_mask=255', #{0}'.format(0x000000ff),
'blue_mask=16711680', #{0}'.format(0x000000ff),
'alpha_mask=-16777216', #{0}'.format(),
'endianness=4321'])))
pipeline.add(appsrc)
# Color-space conversion between the raw RGB input and what the encoder
# accepts.
ffmpegcolorspace = gst.element_factory_make('ffmpegcolorspace')
pipeline.add(ffmpegcolorspace)
# Enforces a constant framerate on the stream.
videorate = gst.element_factory_make('videorate')
pipeline.add(videorate)
# Scaling element (variable name says "ff" but this is plain videoscale).
ffvideoscale = gst.element_factory_make('videoscale')
pipeline.add(ffvideoscale)
# NOTE(review): despite the question asking for MPEG-4, this pipeline
# encodes VP8 and muxes into WebM.
vp8enc = gst.element_factory_make('vp8enc', 'vp8enc')
pipeline.add(vp8enc)
webmmux = gst.element_factory_make('webmmux', 'webmmux')
pipeline.add(webmmux)
# Final sink: write the muxed stream to disk.
filesink = gst.element_factory_make('filesink', 'filesink')
filesink.set_property('location', '/tmp/generated-video.webm')
pipeline.add(filesink)
# Link all elements in order: appsrc -> ... -> filesink.
gst.element_link_many(
appsrc,
ffmpegcolorspace,
videorate,
ffvideoscale,
vp8enc,
webmmux,
filesink)
# Watch the bus so pipeline messages (errors, EOS, ...) get logged.
bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect('message', _on_message)
pipeline.set_state(gst.STATE_PLAYING)
# NOTE(review): drops into the interactive debugger before the main loop
# starts — remove for unattended runs.
pdb.set_trace()
gobject.MainLoop().run()
Gist: https://gist.github.com/ce60c620e7ef3dbd0779
Featured comments