[FFmpeg-devel] [PATCH 1/4] lavd: pulse audio encoder

Stefano Sabatini stefasab at gmail.com
Mon Oct 7 12:22:00 CEST 2013


On date Friday 2013-10-04 18:24:28 +0200, Lukasz M encoded:
> >
> > > +@subsection Options
> >
> > +@table @option
> > > +
> > > +@item server
> > > +Connects to a specific server. Default server is used when not provided.
> >
> > Connect.
> >
> > What exactly is the server?
> >
> 
> This is a good question. TBH I'm not very familiar with PulseAudio, but
> after some quick research and tests it seems to be the address of the host
> running the PulseAudio server.
> Not sure if it can also mean the name of the server, not just the address.
> I made some tests on Debian 7.1 (installed a few days ago, so it is fresh)
> and ffmpeg failed to connect with the "-server localhost" option.
> After loading a module with
> pactl load-module module-native-protocol-tcp auth-ip-acl=LOCAL_IP
> it worked.

> I'm not quite sure "name" is adequate, but in the PulseAudio API it is
> called this way.

We should follow the pulse API.

What about: connect to a specific PulseAudio server, specified by an
IP address.

> 
> 
> > > +@item fragment_size
> > > +Specify the minimal buffering fragment in PulseAudio, it will affect the
> > > +audio latency. By default it is unset.
> >
> > expressed in which unit?
> 
> 
> It is in bytes, but I rushed with copying it from the decoder file. This
> parameter is relevant for recording only, so I removed it.
> There are some options for playback; I will add them later, but they
> recommend using the default values anyway.
> 
> Rest of the remarks fixed.

> From 404d8a3ad94c50a61c2550eb0c871b868814801f Mon Sep 17 00:00:00 2001
> From: Lukasz Marek <lukasz.m.luki at gmail.com>
> Date: Fri, 4 Oct 2013 11:49:07 +0200
> Subject: [PATCH 1/4] lavd: pulse audio encoder
> 
> Signed-off-by: Lukasz Marek <lukasz.m.luki at gmail.com>
> ---
>  Changelog                     |    1 +
>  configure                     |    1 +
>  doc/outdevs.texi              |   31 +++++++++
>  libavdevice/Makefile          |    1 +
>  libavdevice/alldevices.c      |    2 +-
>  libavdevice/pulse_audio_enc.c |  154 +++++++++++++++++++++++++++++++++++++++++
>  6 files changed, 189 insertions(+), 1 deletion(-)
>  create mode 100644 libavdevice/pulse_audio_enc.c
> 
> diff --git a/Changelog b/Changelog
> index b63e036..8311c88 100644
> --- a/Changelog
> +++ b/Changelog
> @@ -37,6 +37,7 @@ version <next>
>    the skip_alpha flag.
>  - ladspa wrapper filter
>  - native VP9 decoder
> +- PulseAudio output device
>  
>  
>  version 2.0:
> diff --git a/configure b/configure
> index 7b8cc81..c147522 100755
> --- a/configure
> +++ b/configure
> @@ -2132,6 +2132,7 @@ openal_indev_deps="openal"
>  oss_indev_deps_any="soundcard_h sys_soundcard_h"
>  oss_outdev_deps_any="soundcard_h sys_soundcard_h"
>  pulse_indev_deps="libpulse"
> +pulse_outdev_deps="libpulse"
>  sdl_outdev_deps="sdl"
>  sndio_indev_deps="sndio_h"
>  sndio_outdev_deps="sndio_h"
> diff --git a/doc/outdevs.texi b/doc/outdevs.texi
> index 0946276..a54f4ea 100644
> --- a/doc/outdevs.texi
> +++ b/doc/outdevs.texi
> @@ -108,6 +108,37 @@ ffmpeg -i INPUT -pix_fmt rgb24 -f caca -list_dither colors -
>  
>  OSS (Open Sound System) output device.
>  
> +@section pulse
> +
> +PulseAudio output device.
> +
> +To enable this output device you need to configure FFmpeg with @code{--enable-libpulse}.
> +
> +@subsection Options
> +@table @option
> +
> +@item server
> +Connect to a specific server. Default server is used when not provided.
> +
> +@item name
> +Specify the application name PulseAudio will use when showing active clients,
> +by default it is the @code{LIBAVFORMAT_IDENT} string.
> +

> +@item stream_name
> +Specify the stream name PulseAudio will use when showing active streams,
> +by default it is set to output name.

nit: to the specified output name.

> +
> +@item device
> +Specify the device to use. Default device is used when not provided.
> +
> +@end table
> +

> +@subsection Examples
> +Play a file using PulseAudio:
> +@example
> +ffmpeg  -i INPUT -f pulse -

The description can be slightly more detailed; for example, you could
specify that it will send the stream to the default server.

> +@end example

Also, I suggest placing a link to the PulseAudio docs/website here.

> +
>  @section sdl
>  
>  SDL (Simple DirectMedia Layer) output device.
> diff --git a/libavdevice/Makefile b/libavdevice/Makefile
> index 424ce98..2fdc47b 100644
> --- a/libavdevice/Makefile
> +++ b/libavdevice/Makefile
> @@ -31,6 +31,7 @@ OBJS-$(CONFIG_OPENAL_INDEV)              += openal-dec.o
>  OBJS-$(CONFIG_OSS_INDEV)                 += oss_audio.o
>  OBJS-$(CONFIG_OSS_OUTDEV)                += oss_audio.o
>  OBJS-$(CONFIG_PULSE_INDEV)               += pulse.o
> +OBJS-$(CONFIG_PULSE_OUTDEV)              += pulse_audio_enc.o
>  OBJS-$(CONFIG_SDL_OUTDEV)                += sdl.o
>  OBJS-$(CONFIG_SNDIO_INDEV)               += sndio_common.o sndio_dec.o
>  OBJS-$(CONFIG_SNDIO_OUTDEV)              += sndio_common.o sndio_enc.o
> diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c
> index fc8d3ce..33ce155 100644
> --- a/libavdevice/alldevices.c
> +++ b/libavdevice/alldevices.c
> @@ -57,7 +57,7 @@ void avdevice_register_all(void)
>      REGISTER_INDEV   (LAVFI,            lavfi);
>      REGISTER_INDEV   (OPENAL,           openal);
>      REGISTER_INOUTDEV(OSS,              oss);
> -    REGISTER_INDEV   (PULSE,            pulse);
> +    REGISTER_INOUTDEV(PULSE,            pulse);
>      REGISTER_OUTDEV  (SDL,              sdl);
>      REGISTER_INOUTDEV(SNDIO,            sndio);
>      REGISTER_INOUTDEV(V4L2,             v4l2);
> diff --git a/libavdevice/pulse_audio_enc.c b/libavdevice/pulse_audio_enc.c
> new file mode 100644
> index 0000000..37dca9e
> --- /dev/null
> +++ b/libavdevice/pulse_audio_enc.c
> @@ -0,0 +1,154 @@
> +/*
> + * Copyright (c) 2013 Lukasz Marek <lukasz.m.luki at gmail.com>
> + *
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include <pulse/simple.h>
> +#include <pulse/error.h>
> +#include "libavutil/opt.h"
> +#include "libavutil/time.h"
> +#include "libavutil/log.h"
> +#include "libavformat/avformat.h"
> +#include "libavformat/internal.h"
> +

> +#define DEFAULT_CODEC_ID AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE)

You can move the define right into the codec definition.
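For example, something like this directly in ff_pulse_muxer (an untested
sketch, simply reusing the AV_NE macro the patch already relies on, so the
separate DEFAULT_CODEC_ID define can be dropped):

    .audio_codec    = AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE),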

> +
> +typedef struct PulseData {
> +    AVClass *class;
> +    const char *server;
> +    const char *name;
> +    const char *stream_name;
> +    const char *device;
> +    pa_simple *pa;
> +} PulseData;
> +
> +static pa_sample_format_t codec_id_to_pulse_format(enum AVCodecID codec_id)
> +{
> +    switch (codec_id) {
> +    case AV_CODEC_ID_PCM_U8:    return PA_SAMPLE_U8;
> +    case AV_CODEC_ID_PCM_ALAW:  return PA_SAMPLE_ALAW;
> +    case AV_CODEC_ID_PCM_MULAW: return PA_SAMPLE_ULAW;
> +    case AV_CODEC_ID_PCM_S16LE: return PA_SAMPLE_S16LE;
> +    case AV_CODEC_ID_PCM_S16BE: return PA_SAMPLE_S16BE;
> +    case AV_CODEC_ID_PCM_F32LE: return PA_SAMPLE_FLOAT32LE;
> +    case AV_CODEC_ID_PCM_F32BE: return PA_SAMPLE_FLOAT32BE;
> +    case AV_CODEC_ID_PCM_S32LE: return PA_SAMPLE_S32LE;
> +    case AV_CODEC_ID_PCM_S32BE: return PA_SAMPLE_S32BE;
> +    case AV_CODEC_ID_PCM_S24LE: return PA_SAMPLE_S24LE;
> +    case AV_CODEC_ID_PCM_S24BE: return PA_SAMPLE_S24BE;
> +    default:                    return PA_SAMPLE_INVALID;
> +    }
> +}
> +
> +static av_cold int pulse_write_header(AVFormatContext *h)
> +{
> +    PulseData *s = h->priv_data;
> +    AVStream *st = h->streams[0];
> +    int ret;
> +    pa_sample_spec ss = { codec_id_to_pulse_format(st->codec->codec_id),
> +                          st->codec->sample_rate,
> +                          st->codec->channels };
> +    pa_buffer_attr attr = { -1, -1, -1, -1, -1 };
> +    const char *stream_name = s->stream_name;
> +
> +    if (!stream_name)
> +        stream_name = h->filename;
> +
> +    s->pa = pa_simple_new(s->server,                 // Server
> +                          s->name,                   // Application name
> +                          PA_STREAM_PLAYBACK,
> +                          s->device,                 // Device
> +                          stream_name,               // Description of a stream
> +                          &ss,                       // Sample format
> +                          NULL,                      // Use default channel map
> +                          &attr,                     // Buffering attributes
> +                          &ret);                     // Result
> +
> +    if (!s->pa) {
> +        av_log(s, AV_LOG_ERROR, "pa_simple_new failed: %s\n", pa_strerror(ret));
> +        return AVERROR(EIO);
> +    }
> +
> +    avpriv_set_pts_info(st, 64, 1, 1000000);  /* 64 bits pts in us */
> +
> +    return 0;

Not sure if you should check the number and type of the streams. What
happens if you send two audio streams, or one video stream?
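A minimal sketch of such a check near the top of pulse_write_header(),
assuming only a single audio stream should be accepted (untested):

    /* refuse anything but exactly one audio stream */
    if (h->nb_streams != 1 ||
        h->streams[0]->codec->codec_type != AVMEDIA_TYPE_AUDIO) {
        av_log(s, AV_LOG_ERROR, "Only a single audio stream is supported.\n");
        return AVERROR(EINVAL);
    }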

> +}
> +
> +static av_cold int pulse_write_trailer(AVFormatContext *h)
> +{
> +    PulseData *s = h->priv_data;
> +    pa_simple_flush(s->pa, NULL);
> +    pa_simple_free(s->pa);
> +    s->pa = NULL;
> +    return 0;
> +}
> +
> +static int pulse_write_packet(AVFormatContext *h, AVPacket *pkt)
> +{
> +    PulseData *s = h->priv_data;
> +    int size     = pkt->size;
> +    uint8_t *buf = pkt->data;
> +    int error;
> +
> +    if ((error = pa_simple_write(s->pa, buf, size, &error))) {
> +        av_log(s, AV_LOG_ERROR, "pa_simple_write failed: %s\n", pa_strerror(error));
> +        return AVERROR(EIO);
> +    }
> +
> +    return 0;
> +}
> +
> +static void pulse_get_output_timestamp(AVFormatContext *h, int stream, int64_t *dts, int64_t *wall)
> +{
> +    PulseData *s = h->priv_data;
> +    pa_usec_t latency = pa_simple_get_latency(s->pa, NULL);
> +    *wall = av_gettime();
> +    *dts = h->streams[0]->cur_dts - latency;
> +}
> +
> +#define OFFSET(a) offsetof(PulseData, a)
> +#define E AV_OPT_FLAG_ENCODING_PARAM
> +
> +static const AVOption options[] = {
> +    { "server",        "set pulse server name",  OFFSET(server),      AV_OPT_TYPE_STRING, {.str = NULL},     0, 0, E },
> +    { "name",          "set application name",   OFFSET(name),        AV_OPT_TYPE_STRING, {.str = LIBAVFORMAT_IDENT},  0, 0, E },
> +    { "stream_name",   "set stream description", OFFSET(stream_name), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, E },
> +    { "device",        "set device name",        OFFSET(device),      AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, E },
> +    { NULL }
> +};
> +
> +static const AVClass pulse_muxer_class = {
> +    .class_name     = "Pulse muxer",
> +    .item_name      = av_default_item_name,
> +    .option         = options,
> +    .version        = LIBAVUTIL_VERSION_INT,
> +};
> +
> +AVOutputFormat ff_pulse_muxer = {
> +    .name           = "pulse",
> +    .long_name      = NULL_IF_CONFIG_SMALL("Pulse audio output"),
> +    .priv_data_size = sizeof(PulseData),
> +    .audio_codec    = DEFAULT_CODEC_ID,
> +    .video_codec    = AV_CODEC_ID_NONE,
> +    .write_header   = pulse_write_header,
> +    .write_packet   = pulse_write_packet,
> +    .write_trailer  = pulse_write_trailer,
> +    .get_output_timestamp = pulse_get_output_timestamp,
> +    .flags          = AVFMT_NOFILE,
> +    .priv_class     = &pulse_muxer_class,
> +};

LGTM otherwise.
-- 
FFmpeg = Fiendish Fostering Multimedia Plastic Erotic God

