#!/usr/bin/perl

# Send live video/audio media as RTP streams, published via RTSP

# Depends: libglib-object-introspection-perl, gir1.2-gst-rtsp-server-1.0
# Recommends: gstreamer1.0-plugins-good

use v5.12;
use strictures 2;
use Glib qw( TRUE FALSE );
use Glib::Object::Introspection;
use IPC::System::Simple qw(capturex);

BEGIN {
    Glib::Object::Introspection->setup(
        basename => 'Gst',
        version  => '1.0',
        package  => 'Gst',
    );
    Glib::Object::Introspection->setup(
        basename => 'GstRtspServer',
        version  => '1.0',
        package  => 'Gst',
    );
}

my $ADDRESS  = shift || $ENV{'ADDRESS'}  || '127.0.0.1';
my $PORT     = shift || $ENV{'PORT'}     || '8554';
my $VDEVICES = shift || $ENV{'VDEVICES'} || '';
my $ADEVICES = shift || $ENV{'ADEVICES'} || '';
my $VFORMAT  = shift || $ENV{'VFORMAT'}  || 'VP8';     # H264 VP8 RAW - default: VP8
my $AFORMAT  = shift || $ENV{'AFORMAT'}  || 'OPUS';    # AMR OPUS RAW - default: OPUS

my @VDEVICES
    = $VDEVICES
    ? split ' ', $VDEVICES
    : sort split ' ',
    capturex( 'find', qw(/dev -maxdepth 1 -type c -name video*) );

# FIXME: Detect/blacklist and skip faulty devices
#my @ADEVICES = grep { /^hw:/ } capturex( 'arecord', qw(-L) );
my @ADEVICES = split ' ', $ADEVICES;
chomp @ADEVICES;

#use Data::Dump; die dd @ADEVICES;

my $HEIGHT    = 240;
my $FRAMERATE = 25;
my $AUDIORATE = 48000;

# TODO: Vary vpx quality based on height
my $SPEED_VP8 = 15;

# Whether a parallel WebM/VP8 encode runs alongside x264; when unset,
# x264 is alone and can afford a faster preset.
# NOTE: not set anywhere else in this script - assumed to come from the environment.
my $HEIGHTS_WEBM = $ENV{'HEIGHTS_WEBM'} || '';

# inspired by Apple HLS recommendations
# TODO: Externalize to site-specific configfile
my ( $VBITRATE, $SPEED_X264, $SPEED_X264_ALONE );
if ( $HEIGHT <= 234 ) {
    $VBITRATE         = 145000;
    $SPEED_X264       = 'slow';
    $SPEED_X264_ALONE = 'fast';
}
elsif ( $HEIGHT <= 270 ) {
    $VBITRATE         = 365000;
    $SPEED_X264       = 'faster';
    $SPEED_X264_ALONE = 'fast';
}
elsif ( $HEIGHT <= 360 ) {
    $VBITRATE         = 730000;
    $SPEED_X264       = 'faster';
    $SPEED_X264_ALONE = 'fast';
}
elsif ( $HEIGHT <= 432 ) {
    $VBITRATE         = 1100000;
    $SPEED_X264       = 'veryfast';
    $SPEED_X264_ALONE = 'fast';
}
elsif ( $HEIGHT <= 540 ) {
    $VBITRATE         = 2000000;
    $SPEED_X264       = 'toofast';
    $SPEED_X264_ALONE = 'veryfast';
}
elsif ( $HEIGHT <= 720 ) {
    $VBITRATE         = 3000000;
    $SPEED_X264       = 'toofast';
    $SPEED_X264_ALONE = 'ultrafast';
}
unless ($HEIGHTS_WEBM) { $SPEED_X264 = $SPEED_X264_ALONE }
die "Not enough CPU - reduce size or streams"
    if $SPEED_X264 eq 'toofast';

my $VCAPS = "video/x-raw,height=$HEIGHT";
my $ACAPS = "audio/x-raw,rate=$AUDIORATE,channels=2,depth=16";

# * http://stackoverflow.com/a/42237307
my $ABUFFERS = 20000;

# * force threads using queues - see http://stackoverflow.com/a/30738533
# * generous queue sizes inspired by https://wiki.xiph.org/GST_cookbook
my $QUEUE = "queue max-size-bytes=100000000 max-size-time=0";

my %PIPELINE = (
    AMR => {
        AENC => [ 'amrnbenc', $QUEUE, 'rtpamrpay' ],
    },
    H264 => {

        # * let x264 use low-latency sliced-threads (i.e. don't disable threads)
        VENC => [
            "x264enc speed-preset=$SPEED_X264 tune=zerolatency bitrate=800 byte-stream=true key-int-max=15 intra-refresh=true option-string=\"slice-max-size=8192:vbv-maxrate=80:vbv-bufsize=10\"",
            'video/x-h264,profile=baseline',
            $QUEUE,
            'rtph264pay',
        ],
    },
    OPUS => {
        AENC => [ 'opusenc', $QUEUE, 'rtpopuspay' ],
    },
    VP8 => {
        VENC => [
            "vp8enc threads=4 cpu-used=$SPEED_VP8 deadline=1000000 end-usage=1 target-bitrate=$VBITRATE undershoot=95 keyframe-max-dist=999999 max-quantizer=56 static-threshold=500",
            'video/x-vp8',
            $QUEUE,
            'rtpvp8pay',
        ],
    },
    RAW => {
        AENC => ['rtpL16pay'],
        VENC => ['rtpvrawpay'],
    },
);

our $nextpayload = 0;

# Build a video capture sub-pipeline for one v4l2 device,
# ending in a payloader named payN as required by the RTSP media factory
sub cam {
    my ( $device, $payload ) = @_;

    my $pipeline = join(
        ' ! ',
        (   "v4l2src device=$device",
            $QUEUE,
            'videoconvert',
            $VCAPS,
            $QUEUE,
            @{ $PIPELINE{$VFORMAT}{'VENC'} },
        )
    );
    return "( $pipeline name=pay$payload )";
}

# Build an audio capture sub-pipeline for one ALSA device
sub mic {
    my ( $device, $payload ) = @_;

    my $pipeline = join(
        ' ! ',
        (   "alsasrc device=$device buffer-time=$ABUFFERS",
            $QUEUE,
            'audioconvert',
            $QUEUE,
            @{ $PIPELINE{$AFORMAT}{'AENC'} },
        )
    );
    return "( $pipeline name=pay$payload )";
}

# Wrap one or more sub-pipelines in a shared RTSP media factory
sub factory {
    my @pipeline = @_;

    my $factory = Gst::RTSPMediaFactory->new();
    $factory->set_launch( join( ' ', @pipeline ) );
    $factory->set_shared(TRUE);

    #say "media: " . $factory->get_launch();

    # $factory->set_latency(5);
    #say "latency: " . $factory->get_latency();

    return $factory;
}

Gst::init( [ $0, @ARGV ] );

my $loop = Glib::MainLoop->new( undef, FALSE );

# create a server instance
my $server = Gst::RTSPServer->new();
$server->set_address($ADDRESS);
$server->set_service($PORT);

# get the mount points for this server; every server has a default
# object that can be used to map uri mount points to media factories
my $mounts = $server->get_mount_points();

# attach media to URIs
my @mounts;
for my $i ( 0 .. $#VDEVICES ) {
    my $mount = "/cam$i";
    $mounts->add_factory( $mount,
        factory( cam( $VDEVICES[$i], $nextpayload++ ) ) );
    push @mounts, $mount;
}
for my $i ( 0 .. $#ADEVICES ) {
    my $mount = "/mic$i";
    $mounts->add_factory( $mount,
        factory( mic( $ADEVICES[$i], $nextpayload++ ) ) );
    push @mounts, $mount;
}
if ( @ADEVICES and @VDEVICES ) {
    my $mount = "/main";
    $mounts->add_factory(
        $mount,
        factory(
            mic( $ADEVICES[0], $#VDEVICES + 1 ),
            cam( $VDEVICES[0], 0 ),
        )
    );
    push @mounts, $mount;
}

# don't need the ref to the mapper anymore
undef $mounts;

# attach the server to the default maincontext
my $retval = $server->attach(undef);

# start serving
say "streams ready at the following URLs:";
for (@mounts) {
    say "rtsp://$ADDRESS:$PORT$_";
}
$loop->run;