diff options
Diffstat (limited to 'libavdevice')
69 files changed, 13547 insertions, 983 deletions
diff --git a/libavdevice/Makefile b/libavdevice/Makefile index 25e126c..df06d9c 100644 --- a/libavdevice/Makefile +++ b/libavdevice/Makefile @@ -1,3 +1,5 @@ +include $(SUBDIR)../config.mak + NAME = avdevice HEADERS = avdevice.h \ @@ -8,21 +10,44 @@ OBJS = alldevices.o \ # input/output devices OBJS-$(CONFIG_ALSA_INDEV) += alsa-audio-common.o \ - alsa-audio-dec.o + alsa-audio-dec.o timefilter.o OBJS-$(CONFIG_ALSA_OUTDEV) += alsa-audio-common.o \ alsa-audio-enc.o +OBJS-$(CONFIG_AVFOUNDATION_INDEV) += avfoundation.o OBJS-$(CONFIG_BKTR_INDEV) += bktr.o +OBJS-$(CONFIG_CACA_OUTDEV) += caca.o +OBJS-$(CONFIG_DECKLINK_OUTDEV) += decklink_enc.o decklink_enc_c.o decklink_common.o +OBJS-$(CONFIG_DECKLINK_INDEV) += decklink_dec.o decklink_dec_c.o decklink_common.o +OBJS-$(CONFIG_DSHOW_INDEV) += dshow.o dshow_enummediatypes.o \ + dshow_enumpins.o dshow_filter.o \ + dshow_pin.o dshow_common.o OBJS-$(CONFIG_DV1394_INDEV) += dv1394.o -OBJS-$(CONFIG_FBDEV_INDEV) += fbdev.o +OBJS-$(CONFIG_FBDEV_INDEV) += fbdev_dec.o \ + fbdev_common.o +OBJS-$(CONFIG_FBDEV_OUTDEV) += fbdev_enc.o \ + fbdev_common.o +OBJS-$(CONFIG_GDIGRAB_INDEV) += gdigrab.o +OBJS-$(CONFIG_IEC61883_INDEV) += iec61883.o OBJS-$(CONFIG_JACK_INDEV) += jack_audio.o timefilter.o +OBJS-$(CONFIG_LAVFI_INDEV) += lavfi.o +OBJS-$(CONFIG_OPENAL_INDEV) += openal-dec.o +OBJS-$(CONFIG_OPENGL_OUTDEV) += opengl_enc.o OBJS-$(CONFIG_OSS_INDEV) += oss_audio.o oss_audio_dec.o OBJS-$(CONFIG_OSS_OUTDEV) += oss_audio.o oss_audio_enc.o -OBJS-$(CONFIG_PULSE_INDEV) += pulse.o +OBJS-$(CONFIG_PULSE_INDEV) += pulse_audio_dec.o \ + pulse_audio_common.o +OBJS-$(CONFIG_PULSE_OUTDEV) += pulse_audio_enc.o \ + pulse_audio_common.o +OBJS-$(CONFIG_QTKIT_INDEV) += qtkit.o +OBJS-$(CONFIG_SDL_OUTDEV) += sdl.o OBJS-$(CONFIG_SNDIO_INDEV) += sndio_common.o sndio_dec.o OBJS-$(CONFIG_SNDIO_OUTDEV) += sndio_common.o sndio_enc.o -OBJS-$(CONFIG_V4L2_INDEV) += v4l2.o +OBJS-$(CONFIG_V4L2_INDEV) += v4l2.o v4l2-common.o timefilter.o +OBJS-$(CONFIG_V4L2_OUTDEV) += v4l2enc.o 
v4l2-common.o +OBJS-$(CONFIG_V4L_INDEV) += v4l.o OBJS-$(CONFIG_VFWCAP_INDEV) += vfwcap.o OBJS-$(CONFIG_X11GRAB_INDEV) += x11grab.o +OBJS-$(CONFIG_XV_OUTDEV) += xv.o # external libraries OBJS-$(CONFIG_LIBCDIO_INDEV) += libcdio.o @@ -30,6 +55,17 @@ OBJS-$(CONFIG_LIBDC1394_INDEV) += libdc1394.o OBJS-$(HAVE_LIBC_MSVCRT) += file_open.o +# Windows resource file +SLIBOBJS-$(HAVE_GNU_WINDRES) += avdeviceres.o + +SKIPHEADERS-$(CONFIG_DECKLINK) += decklink_enc.h decklink_dec.h \ + decklink_common.h decklink_common_c.h +SKIPHEADERS-$(CONFIG_DSHOW_INDEV) += dshow_capture.h +SKIPHEADERS-$(CONFIG_FBDEV_INDEV) += fbdev_common.h +SKIPHEADERS-$(CONFIG_FBDEV_OUTDEV) += fbdev_common.h +SKIPHEADERS-$(CONFIG_LIBPULSE) += pulse_audio_common.h +SKIPHEADERS-$(CONFIG_V4L2_INDEV) += v4l2-common.h +SKIPHEADERS-$(CONFIG_V4L2_OUTDEV) += v4l2-common.h SKIPHEADERS-$(HAVE_ALSA_ASOUNDLIB_H) += alsa-audio.h SKIPHEADERS-$(HAVE_SNDIO_H) += sndio_common.h diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c index 155f7a8..b8e7854 100644 --- a/libavdevice/alldevices.c +++ b/libavdevice/alldevices.c @@ -1,25 +1,24 @@ /* * Register all the grabbing devices. * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include "config.h" -#include "libavformat/avformat.h" #include "avdevice.h" #define REGISTER_OUTDEV(X, x) \ @@ -48,16 +47,29 @@ void avdevice_register_all(void) /* devices */ REGISTER_INOUTDEV(ALSA, alsa); + REGISTER_INDEV (AVFOUNDATION, avfoundation); REGISTER_INDEV (BKTR, bktr); + REGISTER_OUTDEV (CACA, caca); + REGISTER_INOUTDEV(DECKLINK, decklink); + REGISTER_INDEV (DSHOW, dshow); REGISTER_INDEV (DV1394, dv1394); - REGISTER_INDEV (FBDEV, fbdev); + REGISTER_INOUTDEV(FBDEV, fbdev); + REGISTER_INDEV (GDIGRAB, gdigrab); + REGISTER_INDEV (IEC61883, iec61883); REGISTER_INDEV (JACK, jack); + REGISTER_INDEV (LAVFI, lavfi); + REGISTER_INDEV (OPENAL, openal); + REGISTER_OUTDEV (OPENGL, opengl); REGISTER_INOUTDEV(OSS, oss); - REGISTER_INDEV (PULSE, pulse); + REGISTER_INOUTDEV(PULSE, pulse); + REGISTER_INDEV (QTKIT, qtkit); + REGISTER_OUTDEV (SDL, sdl); REGISTER_INOUTDEV(SNDIO, sndio); - REGISTER_INDEV (V4L2, v4l2); + REGISTER_INOUTDEV(V4L2, v4l2); +// REGISTER_INDEV (V4L, v4l REGISTER_INDEV (VFWCAP, vfwcap); REGISTER_INDEV (X11GRAB, x11grab); + REGISTER_OUTDEV (XV, xv); /* external libraries */ REGISTER_INDEV (LIBCDIO, libcdio); diff --git a/libavdevice/alsa-audio-common.c b/libavdevice/alsa-audio-common.c index 21f1594..4e63397 100644 --- a/libavdevice/alsa-audio-common.c +++ b/libavdevice/alsa-audio-common.c @@ -3,20 +3,20 @@ * Copyright (c) 2007 Luca Abeni ( lucabe72 email it ) * Copyright (c) 2007 Benoit Fouet ( benoit fouet free fr ) * - * This file is part of Libav. + * This file is part of FFmpeg. 
* - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -29,7 +29,7 @@ */ #include <alsa/asoundlib.h> -#include "libavformat/avformat.h" +#include "avdevice.h" #include "libavutil/avassert.h" #include "libavutil/channel_layout.h" @@ -62,48 +62,45 @@ static av_cold snd_pcm_format_t codec_id_to_pcm_format(int codec_id) } } -#define REORDER_OUT_50(NAME, TYPE) \ -static void alsa_reorder_ ## NAME ## _out_50(const void *in_v, void *out_v, int n) \ -{ \ - const TYPE *in = in_v; \ - TYPE *out = out_v; \ -\ - while (n-- > 0) { \ +#define MAKE_REORDER_FUNC(NAME, TYPE, CHANNELS, LAYOUT, MAP) \ +static void alsa_reorder_ ## NAME ## _ ## LAYOUT(const void *in_v, \ + void *out_v, \ + int n) \ +{ \ + const TYPE *in = in_v; \ + TYPE *out = out_v; \ + \ + while (n-- > 0) { \ + MAP \ + in += CHANNELS; \ + out += CHANNELS; \ + } \ +} + +#define MAKE_REORDER_FUNCS(CHANNELS, LAYOUT, MAP) \ + MAKE_REORDER_FUNC(int8, int8_t, CHANNELS, LAYOUT, MAP) \ + MAKE_REORDER_FUNC(int16, int16_t, CHANNELS, LAYOUT, MAP) \ + MAKE_REORDER_FUNC(int32, int32_t, CHANNELS, LAYOUT, MAP) \ + MAKE_REORDER_FUNC(f32, float, CHANNELS, LAYOUT, MAP) + +MAKE_REORDER_FUNCS(5, 
out_50, \ out[0] = in[0]; \ out[1] = in[1]; \ out[2] = in[3]; \ out[3] = in[4]; \ out[4] = in[2]; \ - in += 5; \ - out += 5; \ - } \ -} + ); -#define REORDER_OUT_51(NAME, TYPE) \ -static void alsa_reorder_ ## NAME ## _out_51(const void *in_v, void *out_v, int n) \ -{ \ - const TYPE *in = in_v; \ - TYPE *out = out_v; \ -\ - while (n-- > 0) { \ +MAKE_REORDER_FUNCS(6, out_51, \ out[0] = in[0]; \ out[1] = in[1]; \ out[2] = in[4]; \ out[3] = in[5]; \ out[4] = in[2]; \ out[5] = in[3]; \ - in += 6; \ - out += 6; \ - } \ -} + ); -#define REORDER_OUT_71(NAME, TYPE) \ -static void alsa_reorder_ ## NAME ## _out_71(const void *in_v, void *out_v, int n) \ -{ \ - const TYPE *in = in_v; \ - TYPE *out = out_v; \ -\ - while (n-- > 0) { \ +MAKE_REORDER_FUNCS(8, out_71, \ out[0] = in[0]; \ out[1] = in[1]; \ out[2] = in[4]; \ @@ -112,23 +109,7 @@ static void alsa_reorder_ ## NAME ## _out_71(const void *in_v, void *out_v, int out[5] = in[3]; \ out[6] = in[6]; \ out[7] = in[7]; \ - in += 8; \ - out += 8; \ - } \ -} - -REORDER_OUT_50(int8, int8_t) -REORDER_OUT_51(int8, int8_t) -REORDER_OUT_71(int8, int8_t) -REORDER_OUT_50(int16, int16_t) -REORDER_OUT_51(int16, int16_t) -REORDER_OUT_71(int16, int16_t) -REORDER_OUT_50(int32, int32_t) -REORDER_OUT_51(int32, int32_t) -REORDER_OUT_71(int32, int32_t) -REORDER_OUT_50(f32, float) -REORDER_OUT_51(f32, float) -REORDER_OUT_71(f32, float) + ); #define FORMAT_I8 0 #define FORMAT_I16 1 @@ -320,6 +301,8 @@ av_cold int ff_alsa_close(AVFormatContext *s1) AlsaData *s = s1->priv_data; av_freep(&s->reorder_buf); + if (CONFIG_ALSA_INDEV) + ff_timefilter_destroy(s->timefilter); snd_pcm_close(s->h); return 0; } diff --git a/libavdevice/alsa-audio-dec.c b/libavdevice/alsa-audio-dec.c index 0687a4a..2cdf356 100644 --- a/libavdevice/alsa-audio-dec.c +++ b/libavdevice/alsa-audio-dec.c @@ -3,20 +3,20 @@ * Copyright (c) 2007 Luca Abeni ( lucabe72 email it ) * Copyright (c) 2007 Benoit Fouet ( benoit fouet free fr ) * - * This file is part of Libav. 
+ * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -46,10 +46,12 @@ */ #include <alsa/asoundlib.h> -#include "libavformat/avformat.h" #include "libavformat/internal.h" #include "libavutil/opt.h" +#include "libavutil/mathematics.h" +#include "libavutil/time.h" +#include "avdevice.h" #include "alsa-audio.h" static av_cold int audio_read_header(AVFormatContext *s1) @@ -58,7 +60,6 @@ static av_cold int audio_read_header(AVFormatContext *s1) AVStream *st; int ret; enum AVCodecID codec_id; - snd_pcm_sw_params_t *sw_params; st = avformat_new_stream(s1, NULL); if (!st) { @@ -74,35 +75,17 @@ static av_cold int audio_read_header(AVFormatContext *s1) return AVERROR(EIO); } - if (snd_pcm_type(s->h) != SND_PCM_TYPE_HW) - av_log(s1, AV_LOG_WARNING, - "capture with some ALSA plugins, especially dsnoop, " - "may hang.\n"); - - ret = snd_pcm_sw_params_malloc(&sw_params); - if (ret < 0) { - av_log(s1, AV_LOG_ERROR, "cannot allocate software parameters structure (%s)\n", - snd_strerror(ret)); - goto fail; - } - - snd_pcm_sw_params_current(s->h, sw_params); - snd_pcm_sw_params_set_tstamp_mode(s->h, sw_params, 
SND_PCM_TSTAMP_ENABLE); - - ret = snd_pcm_sw_params(s->h, sw_params); - snd_pcm_sw_params_free(sw_params); - if (ret < 0) { - av_log(s1, AV_LOG_ERROR, "cannot install ALSA software parameters (%s)\n", - snd_strerror(ret)); - goto fail; - } - /* take real parameters */ st->codec->codec_type = AVMEDIA_TYPE_AUDIO; st->codec->codec_id = codec_id; st->codec->sample_rate = s->sample_rate; st->codec->channels = s->channels; avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + /* microseconds instead of seconds, MHz instead of Hz */ + s->timefilter = ff_timefilter_new(1000000.0 / s->sample_rate, + s->period_size, 1.5E-6); + if (!s->timefilter) + goto fail; return 0; @@ -114,16 +97,15 @@ fail: static int audio_read_packet(AVFormatContext *s1, AVPacket *pkt) { AlsaData *s = s1->priv_data; - AVStream *st = s1->streams[0]; int res; - snd_htimestamp_t timestamp; - snd_pcm_uframes_t ts_delay; + int64_t dts; + snd_pcm_sframes_t delay = 0; - if (av_new_packet(pkt, s->period_size) < 0) { + if (av_new_packet(pkt, s->period_size * s->frame_size) < 0) { return AVERROR(EIO); } - while ((res = snd_pcm_readi(s->h, pkt->data, pkt->size / s->frame_size)) < 0) { + while ((res = snd_pcm_readi(s->h, pkt->data, s->period_size)) < 0) { if (res == -EAGAIN) { av_free_packet(pkt); @@ -136,14 +118,14 @@ static int audio_read_packet(AVFormatContext *s1, AVPacket *pkt) return AVERROR(EIO); } + ff_timefilter_reset(s->timefilter); } - snd_pcm_htimestamp(s->h, &ts_delay, ×tamp); - ts_delay += res; - pkt->pts = timestamp.tv_sec * 1000000LL - + (timestamp.tv_nsec * st->codec->sample_rate - - (int64_t)ts_delay * 1000000000LL + st->codec->sample_rate * 500LL) - / (st->codec->sample_rate * 1000LL); + dts = av_gettime(); + snd_pcm_delay(s->h, &delay); + dts -= av_rescale(delay + res, 1000000, s->sample_rate); + pkt->pts = ff_timefilter_update(s->timefilter, dts, s->last_period); + s->last_period = res; pkt->size = res * s->frame_size; @@ -161,6 +143,7 @@ static const AVClass alsa_demuxer_class = 
{ .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, }; AVInputFormat ff_alsa_demuxer = { diff --git a/libavdevice/alsa-audio-enc.c b/libavdevice/alsa-audio-enc.c index bb4575f..e42cc8f 100644 --- a/libavdevice/alsa-audio-enc.c +++ b/libavdevice/alsa-audio-enc.c @@ -3,20 +3,20 @@ * Copyright (c) 2007 Luca Abeni ( lucabe72 email it ) * Copyright (c) 2007 Benoit Fouet ( benoit fouet free fr ) * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -38,19 +38,26 @@ */ #include <alsa/asoundlib.h> -#include "libavformat/avformat.h" +#include "libavutil/time.h" +#include "libavformat/internal.h" +#include "avdevice.h" #include "alsa-audio.h" static av_cold int audio_write_header(AVFormatContext *s1) { AlsaData *s = s1->priv_data; - AVStream *st; + AVStream *st = NULL; unsigned int sample_rate; enum AVCodecID codec_id; int res; + if (s1->nb_streams != 1 || s1->streams[0]->codec->codec_type != AVMEDIA_TYPE_AUDIO) { + av_log(s1, AV_LOG_ERROR, "Only a single audio stream is supported.\n"); + return AVERROR(EINVAL); + } st = s1->streams[0]; + sample_rate = st->codec->sample_rate; codec_id = st->codec->codec_id; res = ff_alsa_open(s1, SND_PCM_STREAM_PLAYBACK, &sample_rate, @@ -61,6 +68,7 @@ static av_cold int audio_write_header(AVFormatContext *s1) st->codec->sample_rate, sample_rate); goto fail; } + avpriv_set_pts_info(st, 64, 1, sample_rate); return res; @@ -77,6 +85,10 @@ static int audio_write_packet(AVFormatContext *s1, AVPacket *pkt) uint8_t *buf = pkt->data; size /= s->frame_size; + if (pkt->dts != AV_NOPTS_VALUE) + s->timestamp = pkt->dts; + s->timestamp += pkt->duration ? pkt->duration : size; + if (s->reorder_func) { if (size > s->reorder_buf_size) if (ff_alsa_extend_reorder_buf(s, size)) @@ -101,6 +113,42 @@ static int audio_write_packet(AVFormatContext *s1, AVPacket *pkt) return 0; } +static int audio_write_frame(AVFormatContext *s1, int stream_index, + AVFrame **frame, unsigned flags) +{ + AlsaData *s = s1->priv_data; + AVPacket pkt; + + /* ff_alsa_open() should have accepted only supported formats */ + if ((flags & AV_WRITE_UNCODED_FRAME_QUERY)) + return av_sample_fmt_is_planar(s1->streams[stream_index]->codec->sample_fmt) ? 
+ AVERROR(EINVAL) : 0; + /* set only used fields */ + pkt.data = (*frame)->data[0]; + pkt.size = (*frame)->nb_samples * s->frame_size; + pkt.dts = (*frame)->pkt_dts; + pkt.duration = av_frame_get_pkt_duration(*frame); + return audio_write_packet(s1, &pkt); +} + +static void +audio_get_output_timestamp(AVFormatContext *s1, int stream, + int64_t *dts, int64_t *wall) +{ + AlsaData *s = s1->priv_data; + snd_pcm_sframes_t delay = 0; + *wall = av_gettime(); + snd_pcm_delay(s->h, &delay); + *dts = s->timestamp - delay; +} + +static const AVClass alsa_muxer_class = { + .class_name = "ALSA muxer", + .item_name = av_default_item_name, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT, +}; + AVOutputFormat ff_alsa_muxer = { .name = "alsa", .long_name = NULL_IF_CONFIG_SMALL("ALSA audio output"), @@ -110,5 +158,8 @@ AVOutputFormat ff_alsa_muxer = { .write_header = audio_write_header, .write_packet = audio_write_packet, .write_trailer = ff_alsa_close, + .write_uncoded_frame = audio_write_frame, + .get_output_timestamp = audio_get_output_timestamp, .flags = AVFMT_NOFILE, + .priv_class = &alsa_muxer_class, }; diff --git a/libavdevice/alsa-audio.h b/libavdevice/alsa-audio.h index 26eaee6..583c911 100644 --- a/libavdevice/alsa-audio.h +++ b/libavdevice/alsa-audio.h @@ -3,20 +3,20 @@ * Copyright (c) 2007 Luca Abeni ( lucabe72 email it ) * Copyright (c) 2007 Benoit Fouet ( benoit fouet free fr ) * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. 
* - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -32,26 +32,32 @@ #include <alsa/asoundlib.h> #include "config.h" -#include "libavformat/avformat.h" #include "libavutil/log.h" +#include "timefilter.h" +#include "avdevice.h" /* XXX: we make the assumption that the soundcard accepts this format */ /* XXX: find better solution with "preinit" method, needed also in other formats */ #define DEFAULT_CODEC_ID AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE) -#define ALSA_BUFFER_SIZE_MAX 32768 +typedef void (*ff_reorder_func)(const void *, void *, int); + +#define ALSA_BUFFER_SIZE_MAX 65536 typedef struct AlsaData { AVClass *class; snd_pcm_t *h; - int frame_size; ///< preferred size for reads and writes - int period_size; ///< bytes per sample * channels + int frame_size; ///< bytes per sample * channels + int period_size; ///< preferred size for reads and writes, in frames int sample_rate; ///< sample rate set by user int channels; ///< number of channels set by user + int last_period; + TimeFilter *timefilter; void (*reorder_func)(const void *, void *, int); void *reorder_buf; int reorder_buf_size; ///< in frames + int64_t timestamp; ///< current timestamp, without latency applied. } AlsaData; /** diff --git a/libavdevice/avdevice.c b/libavdevice/avdevice.c index 5a5c762..6a75bd7 100644 --- a/libavdevice/avdevice.c +++ b/libavdevice/avdevice.c @@ -1,36 +1,229 @@ /* - * This file is part of Libav. + * This file is part of FFmpeg. 
* - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ +#include "libavutil/avassert.h" +#include "libavutil/samplefmt.h" +#include "libavutil/pixfmt.h" +#include "libavcodec/avcodec.h" #include "avdevice.h" #include "config.h" +#define E AV_OPT_FLAG_ENCODING_PARAM +#define D AV_OPT_FLAG_DECODING_PARAM +#define A AV_OPT_FLAG_AUDIO_PARAM +#define V AV_OPT_FLAG_VIDEO_PARAM +#define OFFSET(x) offsetof(AVDeviceCapabilitiesQuery, x) + +const AVOption av_device_capabilities[] = { + { "codec", "codec", OFFSET(codec), AV_OPT_TYPE_INT, + {.i64 = AV_CODEC_ID_NONE}, AV_CODEC_ID_NONE, INT_MAX, E|D|A|V }, + { "sample_format", "sample format", OFFSET(sample_format), AV_OPT_TYPE_INT, + {.i64 = AV_SAMPLE_FMT_NONE}, -1, INT_MAX, E|D|A }, + { "sample_rate", "sample rate", OFFSET(sample_rate), AV_OPT_TYPE_INT, + {.i64 = -1}, -1, INT_MAX, E|D|A }, + { "channels", "channels", OFFSET(channels), AV_OPT_TYPE_INT, + {.i64 = -1}, -1, INT_MAX, E|D|A }, + { "channel_layout", "channel layout", OFFSET(channel_layout), AV_OPT_TYPE_INT64, + {.i64 = -1}, -1, INT_MAX, E|D|A }, + { "pixel_format", "pixel format", OFFSET(pixel_format), AV_OPT_TYPE_INT, + {.i64 = 
AV_PIX_FMT_NONE}, -1, INT_MAX, E|D|V }, + { "window_size", "window size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE, + {.str = NULL}, -1, INT_MAX, E|D|V }, + { "frame_size", "frame size", OFFSET(frame_width), AV_OPT_TYPE_IMAGE_SIZE, + {.str = NULL}, -1, INT_MAX, E|D|V }, + { "fps", "fps", OFFSET(fps), AV_OPT_TYPE_RATIONAL, + {.dbl = -1}, -1, INT_MAX, E|D|V }, + { NULL } +}; + +#undef E +#undef D +#undef A +#undef V +#undef OFFSET + unsigned avdevice_version(void) { + av_assert0(LIBAVDEVICE_VERSION_MICRO >= 100); return LIBAVDEVICE_VERSION_INT; } const char * avdevice_configuration(void) { - return LIBAV_CONFIGURATION; + return FFMPEG_CONFIGURATION; } const char * avdevice_license(void) { #define LICENSE_PREFIX "libavdevice license: " - return LICENSE_PREFIX LIBAV_LICENSE + sizeof(LICENSE_PREFIX) - 1; + return LICENSE_PREFIX FFMPEG_LICENSE + sizeof(LICENSE_PREFIX) - 1; +} + +static void *av_device_next(void *prev, int output, + AVClassCategory c1, AVClassCategory c2) +{ + const AVClass *pc; + AVClassCategory category = AV_CLASS_CATEGORY_NA; + do { + if (output) { + if (!(prev = av_oformat_next(prev))) + break; + pc = ((AVOutputFormat *)prev)->priv_class; + } else { + if (!(prev = av_iformat_next(prev))) + break; + pc = ((AVInputFormat *)prev)->priv_class; + } + if (!pc) + continue; + category = pc->category; + } while (category != c1 && category != c2); + return prev; +} + +AVInputFormat *av_input_audio_device_next(AVInputFormat *d) +{ + return av_device_next(d, 0, AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, + AV_CLASS_CATEGORY_DEVICE_INPUT); +} + +AVInputFormat *av_input_video_device_next(AVInputFormat *d) +{ + return av_device_next(d, 0, AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, + AV_CLASS_CATEGORY_DEVICE_INPUT); +} + +AVOutputFormat *av_output_audio_device_next(AVOutputFormat *d) +{ + return av_device_next(d, 1, AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT, + AV_CLASS_CATEGORY_DEVICE_OUTPUT); +} + +AVOutputFormat *av_output_video_device_next(AVOutputFormat *d) +{ + return 
av_device_next(d, 1, AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, + AV_CLASS_CATEGORY_DEVICE_OUTPUT); +} + +int avdevice_app_to_dev_control_message(struct AVFormatContext *s, enum AVAppToDevMessageType type, + void *data, size_t data_size) +{ + if (!s->oformat || !s->oformat->control_message) + return AVERROR(ENOSYS); + return s->oformat->control_message(s, type, data, data_size); +} + +int avdevice_dev_to_app_control_message(struct AVFormatContext *s, enum AVDevToAppMessageType type, + void *data, size_t data_size) +{ + if (!s->control_message_cb) + return AVERROR(ENOSYS); + return s->control_message_cb(s, type, data, data_size); +} + +int avdevice_capabilities_create(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s, + AVDictionary **device_options) +{ + int ret; + av_assert0(s && caps); + av_assert0(s->iformat || s->oformat); + if ((s->oformat && !s->oformat->create_device_capabilities) || + (s->iformat && !s->iformat->create_device_capabilities)) + return AVERROR(ENOSYS); + *caps = av_mallocz(sizeof(**caps)); + if (!(*caps)) + return AVERROR(ENOMEM); + (*caps)->device_context = s; + if (((ret = av_opt_set_dict(s->priv_data, device_options)) < 0)) + goto fail; + if (s->iformat) { + if ((ret = s->iformat->create_device_capabilities(s, *caps)) < 0) + goto fail; + } else { + if ((ret = s->oformat->create_device_capabilities(s, *caps)) < 0) + goto fail; + } + av_opt_set_defaults(*caps); + return 0; + fail: + av_freep(caps); + return ret; +} + +void avdevice_capabilities_free(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s) +{ + if (!s || !caps || !(*caps)) + return; + av_assert0(s->iformat || s->oformat); + if (s->iformat) { + if (s->iformat->free_device_capabilities) + s->iformat->free_device_capabilities(s, *caps); + } else { + if (s->oformat->free_device_capabilities) + s->oformat->free_device_capabilities(s, *caps); + } + av_freep(caps); +} + +int avdevice_list_devices(AVFormatContext *s, AVDeviceInfoList **device_list) +{ + int ret; + av_assert0(s); + 
av_assert0(device_list); + av_assert0(s->oformat || s->iformat); + if ((s->oformat && !s->oformat->get_device_list) || + (s->iformat && !s->iformat->get_device_list)) { + *device_list = NULL; + return AVERROR(ENOSYS); + } + *device_list = av_mallocz(sizeof(AVDeviceInfoList)); + if (!(*device_list)) + return AVERROR(ENOMEM); + /* no default device by default */ + (*device_list)->default_device = -1; + if (s->oformat) + ret = s->oformat->get_device_list(s, *device_list); + else + ret = s->iformat->get_device_list(s, *device_list); + if (ret < 0) + avdevice_free_list_devices(device_list); + return ret; +} + +void avdevice_free_list_devices(AVDeviceInfoList **device_list) +{ + AVDeviceInfoList *list; + AVDeviceInfo *dev; + int i; + + av_assert0(device_list); + list = *device_list; + if (!list) + return; + + for (i = 0; i < list->nb_devices; i++) { + dev = list->devices[i]; + if (dev) { + av_free(dev->device_name); + av_free(dev->device_description); + av_free(dev); + } + } + av_free(list->devices); + av_freep(device_list); } diff --git a/libavdevice/avdevice.h b/libavdevice/avdevice.h index 39166a5..a395228 100644 --- a/libavdevice/avdevice.h +++ b/libavdevice/avdevice.h @@ -1,18 +1,18 @@ /* - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -43,6 +43,11 @@ * @} */ +#include "libavutil/log.h" +#include "libavutil/opt.h" +#include "libavutil/dict.h" +#include "libavformat/avformat.h" + /** * Return the LIBAVDEVICE_VERSION_INT constant. */ @@ -64,4 +69,419 @@ const char *avdevice_license(void); */ void avdevice_register_all(void); +/** + * Audio input devices iterator. + * + * If d is NULL, returns the first registered input audio/video device, + * if d is non-NULL, returns the next registered input audio/video device after d + * or NULL if d is the last one. + */ +AVInputFormat *av_input_audio_device_next(AVInputFormat *d); + +/** + * Video input devices iterator. + * + * If d is NULL, returns the first registered input audio/video device, + * if d is non-NULL, returns the next registered input audio/video device after d + * or NULL if d is the last one. + */ +AVInputFormat *av_input_video_device_next(AVInputFormat *d); + +/** + * Audio output devices iterator. + * + * If d is NULL, returns the first registered output audio/video device, + * if d is non-NULL, returns the next registered output audio/video device after d + * or NULL if d is the last one. + */ +AVOutputFormat *av_output_audio_device_next(AVOutputFormat *d); + +/** + * Video output devices iterator. + * + * If d is NULL, returns the first registered output audio/video device, + * if d is non-NULL, returns the next registered output audio/video device after d + * or NULL if d is the last one. 
+ */ +AVOutputFormat *av_output_video_device_next(AVOutputFormat *d); + +typedef struct AVDeviceRect { + int x; /**< x coordinate of top left corner */ + int y; /**< y coordinate of top left corner */ + int width; /**< width */ + int height; /**< height */ +} AVDeviceRect; + +/** + * Message types used by avdevice_app_to_dev_control_message(). + */ +enum AVAppToDevMessageType { + /** + * Dummy message. + */ + AV_APP_TO_DEV_NONE = MKBETAG('N','O','N','E'), + + /** + * Window size change message. + * + * Message is sent to the device every time the application changes the size + * of the window device renders to. + * Message should also be sent right after window is created. + * + * data: AVDeviceRect: new window size. + */ + AV_APP_TO_DEV_WINDOW_SIZE = MKBETAG('G','E','O','M'), + + /** + * Repaint request message. + * + * Message is sent to the device when window has to be repainted. + * + * data: AVDeviceRect: area required to be repainted. + * NULL: whole area is required to be repainted. + */ + AV_APP_TO_DEV_WINDOW_REPAINT = MKBETAG('R','E','P','A'), + + /** + * Request pause/play. + * + * Application requests pause/unpause playback. + * Mostly usable with devices that have internal buffer. + * By default devices are not paused. + * + * data: NULL + */ + AV_APP_TO_DEV_PAUSE = MKBETAG('P', 'A', 'U', ' '), + AV_APP_TO_DEV_PLAY = MKBETAG('P', 'L', 'A', 'Y'), + AV_APP_TO_DEV_TOGGLE_PAUSE = MKBETAG('P', 'A', 'U', 'T'), + + /** + * Volume control message. + * + * Set volume level. It may be device-dependent if volume + * is changed per stream or system wide. Per stream volume + * change is expected when possible. + * + * data: double: new volume with range of 0.0 - 1.0. + */ + AV_APP_TO_DEV_SET_VOLUME = MKBETAG('S', 'V', 'O', 'L'), + + /** + * Mute control messages. + * + * Change mute state. It may be device-dependent if mute status + * is changed per stream or system wide. Per stream mute status + * change is expected when possible. + * + * data: NULL. 
+ */ + AV_APP_TO_DEV_MUTE = MKBETAG(' ', 'M', 'U', 'T'), + AV_APP_TO_DEV_UNMUTE = MKBETAG('U', 'M', 'U', 'T'), + AV_APP_TO_DEV_TOGGLE_MUTE = MKBETAG('T', 'M', 'U', 'T'), + + /** + * Get volume/mute messages. + * + * Force the device to send AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED or + * AV_DEV_TO_APP_MUTE_STATE_CHANGED command respectively. + * + * data: NULL. + */ + AV_APP_TO_DEV_GET_VOLUME = MKBETAG('G', 'V', 'O', 'L'), + AV_APP_TO_DEV_GET_MUTE = MKBETAG('G', 'M', 'U', 'T'), +}; + +/** + * Message types used by avdevice_dev_to_app_control_message(). + */ +enum AVDevToAppMessageType { + /** + * Dummy message. + */ + AV_DEV_TO_APP_NONE = MKBETAG('N','O','N','E'), + + /** + * Create window buffer message. + * + * Device requests to create a window buffer. Exact meaning is device- + * and application-dependent. Message is sent before rendering first + * frame and all one-shot initializations should be done here. + * Application is allowed to ignore preferred window buffer size. + * + * @note: Application is obligated to inform about window buffer size + * with AV_APP_TO_DEV_WINDOW_SIZE message. + * + * data: AVDeviceRect: preferred size of the window buffer. + * NULL: no preferred size of the window buffer. + */ + AV_DEV_TO_APP_CREATE_WINDOW_BUFFER = MKBETAG('B','C','R','E'), + + /** + * Prepare window buffer message. + * + * Device requests to prepare a window buffer for rendering. + * Exact meaning is device- and application-dependent. + * Message is sent before rendering of each frame. + * + * data: NULL. + */ + AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER = MKBETAG('B','P','R','E'), + + /** + * Display window buffer message. + * + * Device requests to display a window buffer. + * Message is sent when new frame is ready to be displayed. + * Usually buffers need to be swapped in handler of this message. + * + * data: NULL. + */ + AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER = MKBETAG('B','D','I','S'), + + /** + * Destroy window buffer message. 
+ * + * Device requests to destroy a window buffer. + * Message is sent when device is about to be destroyed and window + * buffer is not required anymore. + * + * data: NULL. + */ + AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER = MKBETAG('B','D','E','S'), + + /** + * Buffer fullness status messages. + * + * Device signals buffer overflow/underflow. + * + * data: NULL. + */ + AV_DEV_TO_APP_BUFFER_OVERFLOW = MKBETAG('B','O','F','L'), + AV_DEV_TO_APP_BUFFER_UNDERFLOW = MKBETAG('B','U','F','L'), + + /** + * Buffer readable/writable. + * + * Device informs that buffer is readable/writable. + * When possible, device informs how many bytes can be read/write. + * + * @warning Device may not inform when number of bytes than can be read/write changes. + * + * data: int64_t: amount of bytes available to read/write. + * NULL: amount of bytes available to read/write is not known. + */ + AV_DEV_TO_APP_BUFFER_READABLE = MKBETAG('B','R','D',' '), + AV_DEV_TO_APP_BUFFER_WRITABLE = MKBETAG('B','W','R',' '), + + /** + * Mute state change message. + * + * Device informs that mute state has changed. + * + * data: int: 0 for not muted state, non-zero for muted state. + */ + AV_DEV_TO_APP_MUTE_STATE_CHANGED = MKBETAG('C','M','U','T'), + + /** + * Volume level change message. + * + * Device informs that volume level has changed. + * + * data: double: new volume with range of 0.0 - 1.0. + */ + AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED = MKBETAG('C','V','O','L'), +}; + +/** + * Send control message from application to device. + * + * @param s device context. + * @param type message type. + * @param data message data. Exact type depends on message type. + * @param data_size size of message data. + * @return >= 0 on success, negative on error. + * AVERROR(ENOSYS) when device doesn't implement handler of the message. 
+ */
+int avdevice_app_to_dev_control_message(struct AVFormatContext *s,
+                                        enum AVAppToDevMessageType type,
+                                        void *data, size_t data_size);
+
+/**
+ * Send control message from device to application.
+ *
+ * @param s device context.
+ * @param type message type.
+ * @param data message data. Can be NULL.
+ * @param data_size size of message data.
+ * @return >= 0 on success, negative on error.
+ *         AVERROR(ENOSYS) when application doesn't implement handler of the message.
+ */
+int avdevice_dev_to_app_control_message(struct AVFormatContext *s,
+                                        enum AVDevToAppMessageType type,
+                                        void *data, size_t data_size);
+
+/**
+ * Following API allows user to probe device capabilities (supported codecs,
+ * pixel formats, sample formats, resolutions, channel counts, etc).
+ * It is built on top of the AVOption API.
+ * Queried capabilities allow setting up converters of video or audio
+ * parameters that fit the device.
+ *
+ * List of capabilities that can be queried:
+ *  - Capabilities valid for both audio and video devices:
+ *    - codec: supported audio/video codecs.
+ *             type: AV_OPT_TYPE_INT (AVCodecID value)
+ *  - Capabilities valid for audio devices:
+ *    - sample_format: supported sample formats.
+ *                     type: AV_OPT_TYPE_INT (AVSampleFormat value)
+ *    - sample_rate: supported sample rates.
+ *                   type: AV_OPT_TYPE_INT
+ *    - channels: supported number of channels.
+ *                type: AV_OPT_TYPE_INT
+ *    - channel_layout: supported channel layouts.
+ *                      type: AV_OPT_TYPE_INT64
+ *  - Capabilities valid for video devices:
+ *    - pixel_format: supported pixel formats.
+ *                    type: AV_OPT_TYPE_INT (AVPixelFormat value)
+ *    - window_size: supported window sizes (describes size of the window presented to the user).
+ *                   type: AV_OPT_TYPE_IMAGE_SIZE
+ *    - frame_size: supported frame sizes (describes size of provided video frames).
+ *                  type: AV_OPT_TYPE_IMAGE_SIZE
+ *    - fps: supported fps values
+ *           type: AV_OPT_TYPE_RATIONAL
+ *
+ * Value of the capability may be set by user using av_opt_set() function
+ * and AVDeviceCapabilitiesQuery object. Following queries will
+ * limit results to the values matching already set capabilities.
+ * For example, setting a codec may impact number of formats or fps values
+ * returned during next query. Setting invalid value may limit results to zero.
+ *
+ * Example of usage based on the opengl output device:
+ *
+ * @code
+ *  AVFormatContext *oc = NULL;
+ *  AVDeviceCapabilitiesQuery *caps = NULL;
+ *  AVOptionRanges *ranges;
+ *  int ret;
+ *
+ *  if ((ret = avformat_alloc_output_context2(&oc, NULL, "opengl", NULL)) < 0)
+ *      goto fail;
+ *  if (avdevice_capabilities_create(&caps, oc, NULL) < 0)
+ *      goto fail;
+ *
+ *  //query codecs
+ *  if (av_opt_query_ranges(&ranges, caps, "codec", AV_OPT_MULTI_COMPONENT_RANGE) < 0)
+ *      goto fail;
+ *  //pick codec here and set it
+ *  av_opt_set(caps, "codec", AV_CODEC_ID_RAWVIDEO, 0);
+ *
+ *  //query format
+ *  if (av_opt_query_ranges(&ranges, caps, "pixel_format", AV_OPT_MULTI_COMPONENT_RANGE) < 0)
+ *      goto fail;
+ *  //pick format here and set it
+ *  av_opt_set(caps, "pixel_format", AV_PIX_FMT_YUV420P, 0);
+ *
+ *  //query and set more capabilities
+ *
+ * fail:
+ *  //clean up code
+ *  avdevice_capabilities_free(&caps, oc);
+ *  avformat_free_context(oc);
+ * @endcode
+ */
+
+/**
+ * Structure describes device capabilities.
+ *
+ * It is used by devices in conjunction with av_device_capabilities AVOption table
+ * to implement capabilities probing API based on AVOption API. Should not be used directly.
+ */ +typedef struct AVDeviceCapabilitiesQuery { + const AVClass *av_class; + AVFormatContext *device_context; + enum AVCodecID codec; + enum AVSampleFormat sample_format; + enum AVPixelFormat pixel_format; + int sample_rate; + int channels; + int64_t channel_layout; + int window_width; + int window_height; + int frame_width; + int frame_height; + AVRational fps; +} AVDeviceCapabilitiesQuery; + +/** + * AVOption table used by devices to implement device capabilities API. Should not be used by a user. + */ +extern const AVOption av_device_capabilities[]; + +/** + * Initialize capabilities probing API based on AVOption API. + * + * avdevice_capabilities_free() must be called when query capabilities API is + * not used anymore. + * + * @param[out] caps Device capabilities data. Pointer to a NULL pointer must be passed. + * @param s Context of the device. + * @param device_options An AVDictionary filled with device-private options. + * On return this parameter will be destroyed and replaced with a dict + * containing options that were not found. May be NULL. + * The same options must be passed later to avformat_write_header() for output + * devices or avformat_open_input() for input devices, or at any other place + * that affects device-private options. + * + * @return >= 0 on success, negative otherwise. + */ +int avdevice_capabilities_create(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s, + AVDictionary **device_options); + +/** + * Free resources created by avdevice_capabilities_create() + * + * @param caps Device capabilities data to be freed. + * @param s Context of the device. + */ +void avdevice_capabilities_free(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s); + +/** + * Structure describes basic parameters of the device. + */ +typedef struct AVDeviceInfo { + char *device_name; /**< device name, format depends on device */ + char *device_description; /**< human friendly name */ +} AVDeviceInfo; + +/** + * List of devices. 
+ */ +typedef struct AVDeviceInfoList { + AVDeviceInfo **devices; /**< list of autodetected devices */ + int nb_devices; /**< number of autodetected devices */ + int default_device; /**< index of default device or -1 if no default */ +} AVDeviceInfoList; + +/** + * List devices. + * + * Returns available device names and their parameters. + * + * @note: Some devices may accept system-dependent device names that cannot be + * autodetected. The list returned by this function cannot be assumed to + * be always completed. + * + * @param s device context. + * @param[out] device_list list of autodetected devices. + * @return count of autodetected devices, negative on error. + */ +int avdevice_list_devices(struct AVFormatContext *s, AVDeviceInfoList **device_list); + +/** + * Convenient function to free result of avdevice_list_devices(). + * + * @param devices device list to be freed. + */ +void avdevice_free_list_devices(AVDeviceInfoList **device_list); + #endif /* AVDEVICE_AVDEVICE_H */ diff --git a/libavdevice/avdeviceres.rc b/libavdevice/avdeviceres.rc new file mode 100644 index 0000000..e13e73d --- /dev/null +++ b/libavdevice/avdeviceres.rc @@ -0,0 +1,55 @@ +/* + * Windows resource file for libavdevice + * + * Copyright (C) 2012 James Almer + * Copyright (C) 2013 Tiancheng "Timothy" Gu + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <windows.h> +#include "libavdevice/version.h" +#include "libavutil/ffversion.h" +#include "config.h" + +1 VERSIONINFO +FILEVERSION LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO, 0 +PRODUCTVERSION LIBAVDEVICE_VERSION_MAJOR, LIBAVDEVICE_VERSION_MINOR, LIBAVDEVICE_VERSION_MICRO, 0 +FILEFLAGSMASK VS_FFI_FILEFLAGSMASK +FILEOS VOS_NT_WINDOWS32 +FILETYPE VFT_DLL +{ + BLOCK "StringFileInfo" + { + BLOCK "040904B0" + { + VALUE "CompanyName", "FFmpeg Project" + VALUE "FileDescription", "FFmpeg device handling library" + VALUE "FileVersion", AV_STRINGIFY(LIBAVDEVICE_VERSION) + VALUE "InternalName", "libavdevice" + VALUE "LegalCopyright", "Copyright (C) 2000-" AV_STRINGIFY(CONFIG_THIS_YEAR) " FFmpeg Project" + VALUE "OriginalFilename", "avdevice" BUILDSUF "-" AV_STRINGIFY(LIBAVDEVICE_VERSION_MAJOR) SLIBSUF + VALUE "ProductName", "FFmpeg" + VALUE "ProductVersion", FFMPEG_VERSION + } + } + + BLOCK "VarFileInfo" + { + VALUE "Translation", 0x0409, 0x04B0 + } +} diff --git a/libavdevice/avfoundation.m b/libavdevice/avfoundation.m new file mode 100644 index 0000000..8c00a0e --- /dev/null +++ b/libavdevice/avfoundation.m @@ -0,0 +1,801 @@ +/* + * AVFoundation input device + * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * AVFoundation input device + * @author Thilo Borgmann <thilo.borgmann@mail.de> + */ + +#import <AVFoundation/AVFoundation.h> +#include <pthread.h> + +#include "libavutil/pixdesc.h" +#include "libavutil/opt.h" +#include "libavformat/internal.h" +#include "libavutil/internal.h" +#include "libavutil/time.h" +#include "avdevice.h" + +static const int avf_time_base = 1000000; + +static const AVRational avf_time_base_q = { + .num = 1, + .den = avf_time_base +}; + +struct AVFPixelFormatSpec { + enum AVPixelFormat ff_id; + OSType avf_id; +}; + +static const struct AVFPixelFormatSpec avf_pixel_formats[] = { + { AV_PIX_FMT_MONOBLACK, kCVPixelFormatType_1Monochrome }, + { AV_PIX_FMT_RGB555BE, kCVPixelFormatType_16BE555 }, + { AV_PIX_FMT_RGB555LE, kCVPixelFormatType_16LE555 }, + { AV_PIX_FMT_RGB565BE, kCVPixelFormatType_16BE565 }, + { AV_PIX_FMT_RGB565LE, kCVPixelFormatType_16LE565 }, + { AV_PIX_FMT_RGB24, kCVPixelFormatType_24RGB }, + { AV_PIX_FMT_BGR24, kCVPixelFormatType_24BGR }, + { AV_PIX_FMT_0RGB, kCVPixelFormatType_32ARGB }, + { AV_PIX_FMT_BGR0, kCVPixelFormatType_32BGRA }, + { AV_PIX_FMT_0BGR, kCVPixelFormatType_32ABGR }, + { AV_PIX_FMT_RGB0, kCVPixelFormatType_32RGBA }, + { AV_PIX_FMT_BGR48BE, kCVPixelFormatType_48RGB }, + { AV_PIX_FMT_UYVY422, kCVPixelFormatType_422YpCbCr8 }, + { AV_PIX_FMT_YUVA444P, kCVPixelFormatType_4444YpCbCrA8R }, + { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16 }, + { AV_PIX_FMT_YUV444P, kCVPixelFormatType_444YpCbCr8 }, + { AV_PIX_FMT_YUV422P16, kCVPixelFormatType_422YpCbCr16 }, + { AV_PIX_FMT_YUV422P10, kCVPixelFormatType_422YpCbCr10 }, + { AV_PIX_FMT_YUV444P10, kCVPixelFormatType_444YpCbCr10 }, + { AV_PIX_FMT_YUV420P, 
kCVPixelFormatType_420YpCbCr8Planar }, + { AV_PIX_FMT_NV12, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange }, + { AV_PIX_FMT_YUYV422, kCVPixelFormatType_422YpCbCr8_yuvs }, +#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080 + { AV_PIX_FMT_GRAY8, kCVPixelFormatType_OneComponent8 }, +#endif + { AV_PIX_FMT_NONE, 0 } +}; + +typedef struct +{ + AVClass* class; + + float frame_rate; + int frames_captured; + int audio_frames_captured; + int64_t first_pts; + int64_t first_audio_pts; + pthread_mutex_t frame_lock; + pthread_cond_t frame_wait_cond; + id avf_delegate; + id avf_audio_delegate; + + int list_devices; + int video_device_index; + int video_stream_index; + int audio_device_index; + int audio_stream_index; + + char *video_filename; + char *audio_filename; + + int audio_channels; + int audio_bits_per_sample; + int audio_float; + int audio_be; + int audio_signed_integer; + int audio_packed; + int audio_non_interleaved; + + int32_t *audio_buffer; + int audio_buffer_size; + + enum AVPixelFormat pixel_format; + + AVCaptureSession *capture_session; + AVCaptureVideoDataOutput *video_output; + AVCaptureAudioDataOutput *audio_output; + CMSampleBufferRef current_frame; + CMSampleBufferRef current_audio_frame; +} AVFContext; + +static void lock_frames(AVFContext* ctx) +{ + pthread_mutex_lock(&ctx->frame_lock); +} + +static void unlock_frames(AVFContext* ctx) +{ + pthread_mutex_unlock(&ctx->frame_lock); +} + +/** FrameReciever class - delegate for AVCaptureSession + */ +@interface AVFFrameReceiver : NSObject +{ + AVFContext* _context; +} + +- (id)initWithContext:(AVFContext*)context; + +- (void) captureOutput:(AVCaptureOutput *)captureOutput + didOutputSampleBuffer:(CMSampleBufferRef)videoFrame + fromConnection:(AVCaptureConnection *)connection; + +@end + +@implementation AVFFrameReceiver + +- (id)initWithContext:(AVFContext*)context +{ + if (self = [super init]) { + _context = context; + } + return self; +} + +- (void) captureOutput:(AVCaptureOutput *)captureOutput + 
didOutputSampleBuffer:(CMSampleBufferRef)videoFrame + fromConnection:(AVCaptureConnection *)connection +{ + lock_frames(_context); + + if (_context->current_frame != nil) { + CFRelease(_context->current_frame); + } + + _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame); + + pthread_cond_signal(&_context->frame_wait_cond); + + unlock_frames(_context); + + ++_context->frames_captured; +} + +@end + +/** AudioReciever class - delegate for AVCaptureSession + */ +@interface AVFAudioReceiver : NSObject +{ + AVFContext* _context; +} + +- (id)initWithContext:(AVFContext*)context; + +- (void) captureOutput:(AVCaptureOutput *)captureOutput + didOutputSampleBuffer:(CMSampleBufferRef)audioFrame + fromConnection:(AVCaptureConnection *)connection; + +@end + +@implementation AVFAudioReceiver + +- (id)initWithContext:(AVFContext*)context +{ + if (self = [super init]) { + _context = context; + } + return self; +} + +- (void) captureOutput:(AVCaptureOutput *)captureOutput + didOutputSampleBuffer:(CMSampleBufferRef)audioFrame + fromConnection:(AVCaptureConnection *)connection +{ + lock_frames(_context); + + if (_context->current_audio_frame != nil) { + CFRelease(_context->current_audio_frame); + } + + _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame); + + pthread_cond_signal(&_context->frame_wait_cond); + + unlock_frames(_context); + + ++_context->audio_frames_captured; +} + +@end + +static void destroy_context(AVFContext* ctx) +{ + [ctx->capture_session stopRunning]; + + [ctx->capture_session release]; + [ctx->video_output release]; + [ctx->audio_output release]; + [ctx->avf_delegate release]; + [ctx->avf_audio_delegate release]; + + ctx->capture_session = NULL; + ctx->video_output = NULL; + ctx->audio_output = NULL; + ctx->avf_delegate = NULL; + ctx->avf_audio_delegate = NULL; + + av_freep(&ctx->audio_buffer); + + pthread_mutex_destroy(&ctx->frame_lock); + pthread_cond_destroy(&ctx->frame_wait_cond); + + if (ctx->current_frame) { + 
CFRelease(ctx->current_frame); + } +} + +static void parse_device_name(AVFormatContext *s) +{ + AVFContext *ctx = (AVFContext*)s->priv_data; + char *tmp = av_strdup(s->filename); + + if (tmp[0] != ':') { + ctx->video_filename = strtok(tmp, ":"); + ctx->audio_filename = strtok(NULL, ":"); + } else { + ctx->audio_filename = strtok(tmp, ":"); + } +} + +static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device) +{ + AVFContext *ctx = (AVFContext*)s->priv_data; + NSError *error = nil; + AVCaptureDeviceInput* capture_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease]; + + if (!capture_dev_input) { + av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n", + [[error localizedDescription] UTF8String]); + return 1; + } + + if ([ctx->capture_session canAddInput:capture_dev_input]) { + [ctx->capture_session addInput:capture_dev_input]; + } else { + av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n"); + return 1; + } + + // Attaching output + ctx->video_output = [[AVCaptureVideoDataOutput alloc] init]; + + if (!ctx->video_output) { + av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n"); + return 1; + } + + // select pixel format + struct AVFPixelFormatSpec pxl_fmt_spec; + pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE; + + for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) { + if (ctx->pixel_format == avf_pixel_formats[i].ff_id) { + pxl_fmt_spec = avf_pixel_formats[i]; + break; + } + } + + // check if selected pixel format is supported by AVFoundation + if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) { + av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n", + av_get_pix_fmt_name(pxl_fmt_spec.ff_id)); + return 1; + } + + // check if the pixel format is available for this device + if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) { + av_log(s, 
AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n", + av_get_pix_fmt_name(pxl_fmt_spec.ff_id)); + + pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE; + + av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n"); + for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) { + struct AVFPixelFormatSpec pxl_fmt_dummy; + pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE; + for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) { + if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) { + pxl_fmt_dummy = avf_pixel_formats[i]; + break; + } + } + + if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) { + av_log(s, AV_LOG_ERROR, " %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id)); + + // select first supported pixel format instead of user selected (or default) pixel format + if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) { + pxl_fmt_spec = pxl_fmt_dummy; + } + } + } + + // fail if there is no appropriate pixel format or print a warning about overriding the pixel format + if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) { + return 1; + } else { + av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n", + av_get_pix_fmt_name(pxl_fmt_spec.ff_id)); + } + } + + ctx->pixel_format = pxl_fmt_spec.ff_id; + NSNumber *pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id]; + NSDictionary *capture_dict = [NSDictionary dictionaryWithObject:pixel_format + forKey:(id)kCVPixelBufferPixelFormatTypeKey]; + + [ctx->video_output setVideoSettings:capture_dict]; + [ctx->video_output setAlwaysDiscardsLateVideoFrames:YES]; + + ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx]; + + dispatch_queue_t queue = dispatch_queue_create("avf_queue", NULL); + [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue]; + dispatch_release(queue); + + if ([ctx->capture_session canAddOutput:ctx->video_output]) { + [ctx->capture_session addOutput:ctx->video_output]; + } else { + av_log(s, AV_LOG_ERROR, "can't add video 
output to capture session\n"); + return 1; + } + + return 0; +} + +static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device) +{ + AVFContext *ctx = (AVFContext*)s->priv_data; + NSError *error = nil; + AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease]; + + if (!audio_dev_input) { + av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n", + [[error localizedDescription] UTF8String]); + return 1; + } + + if ([ctx->capture_session canAddInput:audio_dev_input]) { + [ctx->capture_session addInput:audio_dev_input]; + } else { + av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n"); + return 1; + } + + // Attaching output + ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init]; + + if (!ctx->audio_output) { + av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n"); + return 1; + } + + ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx]; + + dispatch_queue_t queue = dispatch_queue_create("avf_audio_queue", NULL); + [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue]; + dispatch_release(queue); + + if ([ctx->capture_session canAddOutput:ctx->audio_output]) { + [ctx->capture_session addOutput:ctx->audio_output]; + } else { + av_log(s, AV_LOG_ERROR, "adding audio output to capture session failed\n"); + return 1; + } + + return 0; +} + +static int get_video_config(AVFormatContext *s) +{ + AVFContext *ctx = (AVFContext*)s->priv_data; + + // Take stream info from the first frame. 
+ while (ctx->frames_captured < 1) { + CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES); + } + + lock_frames(ctx); + + AVStream* stream = avformat_new_stream(s, NULL); + + if (!stream) { + return 1; + } + + ctx->video_stream_index = stream->index; + + avpriv_set_pts_info(stream, 64, 1, avf_time_base); + + CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame); + CGSize image_buffer_size = CVImageBufferGetEncodedSize(image_buffer); + + stream->codec->codec_id = AV_CODEC_ID_RAWVIDEO; + stream->codec->codec_type = AVMEDIA_TYPE_VIDEO; + stream->codec->width = (int)image_buffer_size.width; + stream->codec->height = (int)image_buffer_size.height; + stream->codec->pix_fmt = ctx->pixel_format; + + CFRelease(ctx->current_frame); + ctx->current_frame = nil; + + unlock_frames(ctx); + + return 0; +} + +static int get_audio_config(AVFormatContext *s) +{ + AVFContext *ctx = (AVFContext*)s->priv_data; + + // Take stream info from the first frame. + while (ctx->audio_frames_captured < 1) { + CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES); + } + + lock_frames(ctx); + + AVStream* stream = avformat_new_stream(s, NULL); + + if (!stream) { + return 1; + } + + ctx->audio_stream_index = stream->index; + + avpriv_set_pts_info(stream, 64, 1, avf_time_base); + + CMFormatDescriptionRef format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame); + const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc); + + if (!basic_desc) { + av_log(s, AV_LOG_ERROR, "audio format not available\n"); + return 1; + } + + stream->codec->codec_type = AVMEDIA_TYPE_AUDIO; + stream->codec->sample_rate = basic_desc->mSampleRate; + stream->codec->channels = basic_desc->mChannelsPerFrame; + stream->codec->channel_layout = av_get_default_channel_layout(stream->codec->channels); + + ctx->audio_channels = basic_desc->mChannelsPerFrame; + ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel; + ctx->audio_float = 
basic_desc->mFormatFlags & kAudioFormatFlagIsFloat; + ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian; + ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger; + ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked; + ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved; + + if (basic_desc->mFormatID == kAudioFormatLinearPCM && + ctx->audio_float && + ctx->audio_packed) { + stream->codec->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE; + } else { + av_log(s, AV_LOG_ERROR, "audio format is not supported\n"); + return 1; + } + + if (ctx->audio_non_interleaved) { + CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame); + ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer); + ctx->audio_buffer = av_malloc(ctx->audio_buffer_size); + if (!ctx->audio_buffer) { + av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n"); + return 1; + } + } + + CFRelease(ctx->current_audio_frame); + ctx->current_audio_frame = nil; + + unlock_frames(ctx); + + return 0; +} + +static int avf_read_header(AVFormatContext *s) +{ + NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; + AVFContext *ctx = (AVFContext*)s->priv_data; + ctx->first_pts = av_gettime(); + ctx->first_audio_pts = av_gettime(); + + pthread_mutex_init(&ctx->frame_lock, NULL); + pthread_cond_init(&ctx->frame_wait_cond, NULL); + + // List devices if requested + if (ctx->list_devices) { + av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n"); + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in devices) { + const char *name = [[device localizedName] UTF8String]; + int index = [devices indexOfObject:device]; + av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name); + } + av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n"); + devices = [AVCaptureDevice 
devicesWithMediaType:AVMediaTypeAudio]; + for (AVCaptureDevice *device in devices) { + const char *name = [[device localizedName] UTF8String]; + int index = [devices indexOfObject:device]; + av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name); + } + goto fail; + } + + // Find capture device + AVCaptureDevice *video_device = nil; + AVCaptureDevice *audio_device = nil; + + // parse input filename for video and audio device + parse_device_name(s); + + // check for device index given in filename + if (ctx->video_device_index == -1 && ctx->video_filename) { + sscanf(ctx->video_filename, "%d", &ctx->video_device_index); + } + if (ctx->audio_device_index == -1 && ctx->audio_filename) { + sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index); + } + + if (ctx->video_device_index >= 0) { + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + if (ctx->video_device_index >= [devices count]) { + av_log(ctx, AV_LOG_ERROR, "Invalid device index\n"); + goto fail; + } + + video_device = [devices objectAtIndex:ctx->video_device_index]; + } else if (ctx->video_filename && + strncmp(ctx->video_filename, "default", 7)) { + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + for (AVCaptureDevice *device in devices) { + if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) { + video_device = device; + break; + } + } + + if (!video_device) { + av_log(ctx, AV_LOG_ERROR, "Video device not found\n"); + goto fail; + } + } else { + video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + + // get audio device + if (ctx->audio_device_index >= 0) { + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; + + if (ctx->audio_device_index >= [devices count]) { + av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n"); + goto fail; + } + + audio_device = [devices objectAtIndex:ctx->audio_device_index]; + } else if (ctx->audio_filename && + 
strncmp(ctx->audio_filename, "default", 7)) { + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; + + for (AVCaptureDevice *device in devices) { + if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) { + audio_device = device; + break; + } + } + + if (!audio_device) { + av_log(ctx, AV_LOG_ERROR, "Audio device not found\n"); + goto fail; + } + } else { + audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; + } + + // Video nor Audio capture device not found, looking for AVMediaTypeVideo/Audio + if (!video_device && !audio_device) { + av_log(s, AV_LOG_ERROR, "No AV capture device found\n"); + goto fail; + } + + if (video_device) { + av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [[video_device localizedName] UTF8String]); + } + if (audio_device) { + av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]); + } + + // Initialize capture session + ctx->capture_session = [[AVCaptureSession alloc] init]; + + if (video_device && add_video_device(s, video_device)) { + goto fail; + } + if (audio_device && add_audio_device(s, audio_device)) { + } + + [ctx->capture_session startRunning]; + + if (video_device && get_video_config(s)) { + goto fail; + } + + // set audio stream + if (audio_device && get_audio_config(s)) { + goto fail; + } + + [pool release]; + return 0; + +fail: + [pool release]; + destroy_context(ctx); + return AVERROR(EIO); +} + +static int avf_read_packet(AVFormatContext *s, AVPacket *pkt) +{ + AVFContext* ctx = (AVFContext*)s->priv_data; + + do { + lock_frames(ctx); + + CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame); + + if (ctx->current_frame != nil) { + if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(image_buffer)) < 0) { + return AVERROR(EIO); + } + + pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, + AV_TIME_BASE_Q, + avf_time_base_q); + pkt->stream_index = 
ctx->video_stream_index; + pkt->flags |= AV_PKT_FLAG_KEY; + + CVPixelBufferLockBaseAddress(image_buffer, 0); + + void* data = CVPixelBufferGetBaseAddress(image_buffer); + memcpy(pkt->data, data, pkt->size); + + CVPixelBufferUnlockBaseAddress(image_buffer, 0); + CFRelease(ctx->current_frame); + ctx->current_frame = nil; + } else if (ctx->current_audio_frame != nil) { + CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame); + int block_buffer_size = CMBlockBufferGetDataLength(block_buffer); + + if (!block_buffer || !block_buffer_size) { + return AVERROR(EIO); + } + + if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) { + return AVERROR_BUFFER_TOO_SMALL; + } + + if (av_new_packet(pkt, block_buffer_size) < 0) { + return AVERROR(EIO); + } + + pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_audio_pts, + AV_TIME_BASE_Q, + avf_time_base_q); + + pkt->stream_index = ctx->audio_stream_index; + pkt->flags |= AV_PKT_FLAG_KEY; + + if (ctx->audio_non_interleaved) { + int sample, c, shift; + + OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer); + if (ret != kCMBlockBufferNoErr) { + return AVERROR(EIO); + } + + int num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3)); + + // transform decoded frame into output format + #define INTERLEAVE_OUTPUT(bps) \ + { \ + int##bps##_t **src; \ + int##bps##_t *dest; \ + src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \ + if (!src) return AVERROR(EIO); \ + for (c = 0; c < ctx->audio_channels; c++) { \ + src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \ + } \ + dest = (int##bps##_t*)pkt->data; \ + shift = bps - ctx->audio_bits_per_sample; \ + for (sample = 0; sample < num_samples; sample++) \ + for (c = 0; c < ctx->audio_channels; c++) \ + *dest++ = src[c][sample] << shift; \ + av_freep(&src); \ + } + + if (ctx->audio_bits_per_sample <= 16) { + INTERLEAVE_OUTPUT(16) + } else { + 
INTERLEAVE_OUTPUT(32) + } + } else { + OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data); + if (ret != kCMBlockBufferNoErr) { + return AVERROR(EIO); + } + } + + CFRelease(ctx->current_audio_frame); + ctx->current_audio_frame = nil; + } else { + pkt->data = NULL; + pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock); + } + + unlock_frames(ctx); + } while (!pkt->data); + + return 0; +} + +static int avf_close(AVFormatContext *s) +{ + AVFContext* ctx = (AVFContext*)s->priv_data; + destroy_context(ctx); + return 0; +} + +static const AVOption options[] = { + { "frame_rate", "set frame rate", offsetof(AVFContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_TYPE_VIDEO_RATE, NULL }, + { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM }, + { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM }, + { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM}, + { NULL }, +}; + +static const AVClass avf_class = { + .class_name = "AVFoundation input device", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, +}; + +AVInputFormat ff_avfoundation_demuxer = { 
+ .name = "avfoundation", + .long_name = NULL_IF_CONFIG_SMALL("AVFoundation input device"), + .priv_data_size = sizeof(AVFContext), + .read_header = avf_read_header, + .read_packet = avf_read_packet, + .read_close = avf_close, + .flags = AVFMT_NOFILE, + .priv_class = &avf_class, +}; diff --git a/libavdevice/bktr.c b/libavdevice/bktr.c index 695e13f..6c5a834 100644 --- a/libavdevice/bktr.c +++ b/libavdevice/bktr.c @@ -3,28 +3,27 @@ * Copyright (c) 2002 Steve O'Hara-Smith * based on * Linux video grab interface - * Copyright (c) 2000,2001 Gerard Lantau + * Copyright (c) 2000, 2001 Fabrice Bellard * and * simple_grab.c Copyright (c) 1999 Roger Hardiman * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include "libavformat/avformat.h" #include "libavformat/internal.h" #include "libavutil/internal.h" #include "libavutil/log.h" @@ -50,6 +49,7 @@ #include <sys/time.h> #include <signal.h> #include <stdint.h> +#include "avdevice.h" typedef struct VideoData { AVClass *class; @@ -58,7 +58,6 @@ typedef struct VideoData { int width, height; uint64_t per_frame; int standard; - char *video_size; /**< String describing video size, set by a private option. */ char *framerate; /**< Set by a private option. */ } VideoData; @@ -81,7 +80,7 @@ typedef struct VideoData { #define VIDEO_FORMAT NTSC #endif -static int bktr_dev[] = { METEOR_DEV0, METEOR_DEV1, METEOR_DEV2, +static const int bktr_dev[] = { METEOR_DEV0, METEOR_DEV1, METEOR_DEV2, METEOR_DEV3, METEOR_DEV_SVIDEO }; uint8_t *video_buf; @@ -104,7 +103,7 @@ static av_cold int bktr_init(const char *video_device, int width, int height, long ioctl_frequency; char *arg; int c; - struct sigaction act = { 0 }, old; + struct sigaction act = { {0} }, old; if (idev < 0 || idev > 4) { @@ -248,15 +247,9 @@ static int grab_read_header(AVFormatContext *s1) { VideoData *s = s1->priv_data; AVStream *st; - int width, height; AVRational framerate; int ret = 0; - if ((ret = av_parse_video_size(&width, &height, s->video_size)) < 0) { - av_log(s1, AV_LOG_ERROR, "Could not parse video size '%s'.\n", s->video_size); - goto out; - } - if (!s->framerate) switch (s->standard) { case PAL: s->framerate = av_strdup("pal"); break; @@ -279,20 +272,18 @@ static int grab_read_header(AVFormatContext *s1) } avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in use */ - s->width = width; - s->height = height; s->per_frame = ((uint64_t)1000000 * framerate.den) / framerate.num; 
st->codec->codec_type = AVMEDIA_TYPE_VIDEO; st->codec->pix_fmt = AV_PIX_FMT_YUV420P; st->codec->codec_id = AV_CODEC_ID_RAWVIDEO; - st->codec->width = width; - st->codec->height = height; + st->codec->width = s->width; + st->codec->height = s->height; st->codec->time_base.den = framerate.num; st->codec->time_base.num = framerate.den; - if (bktr_init(s1->filename, width, height, s->standard, + if (bktr_init(s1->filename, s->width, s->height, s->standard, &s->video_fd, &s->tuner_fd, -1, 0.0) < 0) { ret = AVERROR(EIO); goto out; @@ -333,7 +324,7 @@ static const AVOption options[] = { { "PALN", "", 0, AV_OPT_TYPE_CONST, {.i64 = PALN}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "standard" }, { "PALM", "", 0, AV_OPT_TYPE_CONST, {.i64 = PALM}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "standard" }, { "NTSCJ", "", 0, AV_OPT_TYPE_CONST, {.i64 = NTSCJ}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "standard" }, - { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, {.str = "vga"}, 0, 0, DEC }, + { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = "vga"}, 0, 0, DEC }, { "framerate", "", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, { NULL }, }; @@ -343,6 +334,7 @@ static const AVClass bktr_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, }; AVInputFormat ff_bktr_demuxer = { diff --git a/libavdevice/caca.c b/libavdevice/caca.c new file mode 100644 index 0000000..a118064 --- /dev/null +++ b/libavdevice/caca.c @@ -0,0 +1,241 @@ +/* + * Copyright (c) 2012 Paul B Mahol + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <caca.h> +#include "libavutil/opt.h" +#include "libavutil/pixdesc.h" +#include "avdevice.h" + +typedef struct CACAContext { + AVClass *class; + AVFormatContext *ctx; + char *window_title; + int window_width, window_height; + + caca_canvas_t *canvas; + caca_display_t *display; + caca_dither_t *dither; + + char *algorithm, *antialias; + char *charset, *color; + char *driver; + + char *list_dither; + int list_drivers; +} CACAContext; + +static int caca_write_trailer(AVFormatContext *s) +{ + CACAContext *c = s->priv_data; + + av_freep(&c->window_title); + + if (c->display) { + caca_free_display(c->display); + c->display = NULL; + } + if (c->dither) { + caca_free_dither(c->dither); + c->dither = NULL; + } + if (c->canvas) { + caca_free_canvas(c->canvas); + c->canvas = NULL; + } + return 0; +} + +static void list_drivers(CACAContext *c) +{ + const char *const *drivers = caca_get_display_driver_list(); + int i; + + av_log(c->ctx, AV_LOG_INFO, "Available drivers:\n"); + for (i = 0; drivers[i]; i += 2) + av_log(c->ctx, AV_LOG_INFO, "%s : %s\n", drivers[i], drivers[i + 1]); +} + +#define DEFINE_LIST_DITHER(thing, thing_str) \ +static void list_dither_## thing(CACAContext *c) \ +{ \ + const char *const *thing = caca_get_dither_## thing 
##_list(c->dither); \ + int i; \ + \ + av_log(c->ctx, AV_LOG_INFO, "Available %s:\n", thing_str); \ + for (i = 0; thing[i]; i += 2) \ + av_log(c->ctx, AV_LOG_INFO, "%s : %s\n", thing[i], thing[i + 1]); \ +} + +DEFINE_LIST_DITHER(color, "colors"); +DEFINE_LIST_DITHER(charset, "charsets"); +DEFINE_LIST_DITHER(algorithm, "algorithms"); +DEFINE_LIST_DITHER(antialias, "antialias"); + +static int caca_write_header(AVFormatContext *s) +{ + CACAContext *c = s->priv_data; + AVStream *st = s->streams[0]; + AVCodecContext *encctx = st->codec; + int ret, bpp; + + c->ctx = s; + if (c->list_drivers) { + list_drivers(c); + return AVERROR_EXIT; + } + if (c->list_dither) { + if (!strcmp(c->list_dither, "colors")) { + list_dither_color(c); + } else if (!strcmp(c->list_dither, "charsets")) { + list_dither_charset(c); + } else if (!strcmp(c->list_dither, "algorithms")) { + list_dither_algorithm(c); + } else if (!strcmp(c->list_dither, "antialiases")) { + list_dither_antialias(c); + } else { + av_log(s, AV_LOG_ERROR, + "Invalid argument '%s', for 'list_dither' option\n" + "Argument must be one of 'algorithms, 'antialiases', 'charsets', 'colors'\n", + c->list_dither); + return AVERROR(EINVAL); + } + return AVERROR_EXIT; + } + + if ( s->nb_streams > 1 + || encctx->codec_type != AVMEDIA_TYPE_VIDEO + || encctx->codec_id != AV_CODEC_ID_RAWVIDEO) { + av_log(s, AV_LOG_ERROR, "Only supports one rawvideo stream\n"); + return AVERROR(EINVAL); + } + + if (encctx->pix_fmt != AV_PIX_FMT_RGB24) { + av_log(s, AV_LOG_ERROR, + "Unsupported pixel format '%s', choose rgb24\n", + av_get_pix_fmt_name(encctx->pix_fmt)); + return AVERROR(EINVAL); + } + + c->canvas = caca_create_canvas(c->window_width, c->window_height); + if (!c->canvas) { + av_log(s, AV_LOG_ERROR, "Failed to create canvas\n"); + ret = AVERROR(errno); + goto fail; + } + + bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(encctx->pix_fmt)); + c->dither = caca_create_dither(bpp, encctx->width, encctx->height, + bpp / 8 * encctx->width, + 
0x0000ff, 0x00ff00, 0xff0000, 0); + if (!c->dither) { + av_log(s, AV_LOG_ERROR, "Failed to create dither\n"); + ret = AVERROR(errno); + goto fail; + } + +#define CHECK_DITHER_OPT(opt) \ + if (caca_set_dither_##opt(c->dither, c->opt) < 0) { \ + ret = AVERROR(errno); \ + av_log(s, AV_LOG_ERROR, "Failed to set value '%s' for option '%s'\n", \ + c->opt, #opt); \ + goto fail; \ + } + CHECK_DITHER_OPT(algorithm); + CHECK_DITHER_OPT(antialias); + CHECK_DITHER_OPT(charset); + CHECK_DITHER_OPT(color); + + c->display = caca_create_display_with_driver(c->canvas, c->driver); + if (!c->display) { + av_log(s, AV_LOG_ERROR, "Failed to create display\n"); + list_drivers(c); + ret = AVERROR(errno); + goto fail; + } + + if (!c->window_width || !c->window_height) { + c->window_width = caca_get_canvas_width(c->canvas); + c->window_height = caca_get_canvas_height(c->canvas); + } + + if (!c->window_title) + c->window_title = av_strdup(s->filename); + caca_set_display_title(c->display, c->window_title); + caca_set_display_time(c->display, av_rescale_q(1, st->codec->time_base, AV_TIME_BASE_Q)); + + return 0; + +fail: + caca_write_trailer(s); + return ret; +} + +static int caca_write_packet(AVFormatContext *s, AVPacket *pkt) +{ + CACAContext *c = s->priv_data; + + caca_dither_bitmap(c->canvas, 0, 0, c->window_width, c->window_height, c->dither, pkt->data); + caca_refresh_display(c->display); + + return 0; +} + +#define OFFSET(x) offsetof(CACAContext,x) +#define ENC AV_OPT_FLAG_ENCODING_PARAM + +static const AVOption options[] = { + { "window_size", "set window forced size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL }, 0, 0, ENC}, + { "window_title", "set window title", OFFSET(window_title), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, ENC }, + { "driver", "set display driver", OFFSET(driver), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, ENC }, + { "algorithm", "set dithering algorithm", OFFSET(algorithm), AV_OPT_TYPE_STRING, {.str = "default" }, 0, 0, ENC }, + { "antialias", 
"set antialias method", OFFSET(antialias), AV_OPT_TYPE_STRING, {.str = "default" }, 0, 0, ENC }, + { "charset", "set charset used to render output", OFFSET(charset), AV_OPT_TYPE_STRING, {.str = "default" }, 0, 0, ENC }, + { "color", "set color used to render output", OFFSET(color), AV_OPT_TYPE_STRING, {.str = "default" }, 0, 0, ENC }, + { "list_drivers", "list available drivers", OFFSET(list_drivers), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, ENC, "list_drivers" }, + { "true", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1}, 0, 0, ENC, "list_drivers" }, + { "false", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0}, 0, 0, ENC, "list_drivers" }, + { "list_dither", "list available dither options", OFFSET(list_dither), AV_OPT_TYPE_STRING, {.dbl=0}, 0, 1, ENC, "list_dither" }, + { "algorithms", NULL, 0, AV_OPT_TYPE_CONST, {.str = "algorithms"}, 0, 0, ENC, "list_dither" }, + { "antialiases", NULL, 0, AV_OPT_TYPE_CONST, {.str = "antialiases"},0, 0, ENC, "list_dither" }, + { "charsets", NULL, 0, AV_OPT_TYPE_CONST, {.str = "charsets"}, 0, 0, ENC, "list_dither" }, + { "colors", NULL, 0, AV_OPT_TYPE_CONST, {.str = "colors"}, 0, 0, ENC, "list_dither" }, + { NULL }, +}; + +static const AVClass caca_class = { + .class_name = "caca_outdev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_caca_muxer = { + .name = "caca", + .long_name = NULL_IF_CONFIG_SMALL("caca (color ASCII art) output device"), + .priv_data_size = sizeof(CACAContext), + .audio_codec = AV_CODEC_ID_NONE, + .video_codec = AV_CODEC_ID_RAWVIDEO, + .write_header = caca_write_header, + .write_packet = caca_write_packet, + .write_trailer = caca_write_trailer, + .flags = AVFMT_NOFILE, + .priv_class = &caca_class, +}; diff --git a/libavdevice/decklink_common.cpp b/libavdevice/decklink_common.cpp new file mode 100644 index 0000000..9a9e44b --- /dev/null +++ b/libavdevice/decklink_common.cpp @@ -0,0 +1,229 @@ +/* + * Blackmagic 
DeckLink output + * Copyright (c) 2013-2014 Ramiro Polla, Luca Barbato, Deti Fliegl + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <DeckLinkAPI.h> +#ifdef _WIN32 +#include <DeckLinkAPI_i.c> +#else +#include <DeckLinkAPIDispatch.cpp> +#endif + +#include <pthread.h> +#include <semaphore.h> + +extern "C" { +#include "libavformat/avformat.h" +#include "libavformat/internal.h" +#include "libavutil/imgutils.h" +} + +#include "decklink_common.h" + +#ifdef _WIN32 +IDeckLinkIterator *CreateDeckLinkIteratorInstance(void) +{ + IDeckLinkIterator *iter; + + if (CoInitialize(NULL) != S_OK) { + av_log(NULL, AV_LOG_ERROR, "COM initialization failed.\n"); + return NULL; + } + + if (CoCreateInstance(CLSID_CDeckLinkIterator, NULL, CLSCTX_ALL, + IID_IDeckLinkIterator, (void**) &iter) != S_OK) { + av_log(NULL, AV_LOG_ERROR, "DeckLink drivers not installed.\n"); + return NULL; + } + + return iter; +} +#endif + +#ifdef _WIN32 +static char *dup_wchar_to_utf8(wchar_t *w) +{ + char *s = NULL; + int l = WideCharToMultiByte(CP_UTF8, 0, w, -1, 0, 0, 0, 0); + s = (char *) av_malloc(l); + if (s) + WideCharToMultiByte(CP_UTF8, 0, w, -1, s, l, 0, 0); + return s; +} +#define DECKLINK_STR OLECHAR * +#define DECKLINK_STRDUP dup_wchar_to_utf8 +#else +#define DECKLINK_STR 
const char * +#define DECKLINK_STRDUP av_strdup +#endif + +HRESULT ff_decklink_get_display_name(IDeckLink *This, const char **displayName) +{ + DECKLINK_STR tmpDisplayName; + HRESULT hr = This->GetDisplayName(&tmpDisplayName); + if (hr != S_OK) + return hr; + *displayName = DECKLINK_STRDUP(tmpDisplayName); + /* free() is needed for a string returned by the DeckLink SDL. */ + free((void *) tmpDisplayName); + return hr; +} + +int ff_decklink_set_format(AVFormatContext *avctx, + int width, int height, + int tb_num, int tb_den, + decklink_direction_t direction, int num) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx; + BMDDisplayModeSupport support; + IDeckLinkDisplayModeIterator *itermode; + IDeckLinkDisplayMode *mode; + int i = 1; + HRESULT res; + + if (direction == DIRECTION_IN) { + res = ctx->dli->GetDisplayModeIterator (&itermode); + } else { + res = ctx->dlo->GetDisplayModeIterator (&itermode); + } + + if (res!= S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get Display Mode Iterator\n"); + return AVERROR(EIO); + } + + + if (tb_num == 1) { + tb_num *= 1000; + tb_den *= 1000; + } + ctx->bmd_mode = bmdModeUnknown; + while ((ctx->bmd_mode == bmdModeUnknown) && itermode->Next(&mode) == S_OK) { + BMDTimeValue bmd_tb_num, bmd_tb_den; + int bmd_width = mode->GetWidth(); + int bmd_height = mode->GetHeight(); + + mode->GetFrameRate(&bmd_tb_num, &bmd_tb_den); + + if ((bmd_width == width && bmd_height == height && + bmd_tb_num == tb_num && bmd_tb_den == tb_den) || i == num) { + ctx->bmd_mode = mode->GetDisplayMode(); + ctx->bmd_width = bmd_width; + ctx->bmd_height = bmd_height; + ctx->bmd_tb_den = bmd_tb_den; + ctx->bmd_tb_num = bmd_tb_num; + ctx->bmd_field_dominance = mode->GetFieldDominance(); + av_log(avctx, AV_LOG_INFO, "Found Decklink mode %d x %d with rate %.2f%s\n", + bmd_width, bmd_height, (float)bmd_tb_den/(float)bmd_tb_num, + (ctx->bmd_field_dominance==bmdLowerFieldFirst 
|| ctx->bmd_field_dominance==bmdUpperFieldFirst)?"(i)":""); + } + + mode->Release(); + i++; + } + + itermode->Release(); + + if (ctx->bmd_mode == bmdModeUnknown) + return -1; + if (direction == DIRECTION_IN) { + if (ctx->dli->DoesSupportVideoMode(ctx->bmd_mode, bmdFormat8BitYUV, + bmdVideoOutputFlagDefault, + &support, NULL) != S_OK) + return -1; + } else { + if (ctx->dlo->DoesSupportVideoMode(ctx->bmd_mode, bmdFormat8BitYUV, + bmdVideoOutputFlagDefault, + &support, NULL) != S_OK) + return -1; + } + if (support == bmdDisplayModeSupported) + return 0; + + return -1; +} + +int ff_decklink_set_format(AVFormatContext *avctx, decklink_direction_t direction, int num) { + return ff_decklink_set_format(avctx, 0, 0, 0, 0, direction, num); +} + +int ff_decklink_list_devices(AVFormatContext *avctx) +{ + IDeckLink *dl = NULL; + IDeckLinkIterator *iter = CreateDeckLinkIteratorInstance(); + if (!iter) { + av_log(avctx, AV_LOG_ERROR, "Could not create DeckLink iterator\n"); + return AVERROR(EIO); + } + av_log(avctx, AV_LOG_INFO, "Blackmagic DeckLink devices:\n"); + while (iter->Next(&dl) == S_OK) { + const char *displayName; + ff_decklink_get_display_name(dl, &displayName); + av_log(avctx, AV_LOG_INFO, "\t'%s'\n", displayName); + av_free((void *) displayName); + dl->Release(); + } + iter->Release(); + return 0; +} + +int ff_decklink_list_formats(AVFormatContext *avctx, decklink_direction_t direction) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx; + IDeckLinkDisplayModeIterator *itermode; + IDeckLinkDisplayMode *mode; + int i=0; + HRESULT res; + + if (direction == DIRECTION_IN) { + res = ctx->dli->GetDisplayModeIterator (&itermode); + } else { + res = ctx->dlo->GetDisplayModeIterator (&itermode); + } + + if (res!= S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get Display Mode Iterator\n"); + return AVERROR(EIO); + } + + av_log(avctx, AV_LOG_INFO, "Supported formats for '%s':\n", + 
avctx->filename); + while (itermode->Next(&mode) == S_OK) { + BMDTimeValue tb_num, tb_den; + mode->GetFrameRate(&tb_num, &tb_den); + av_log(avctx, AV_LOG_INFO, "\t%d\t%ldx%ld at %d/%d fps", + ++i,mode->GetWidth(), mode->GetHeight(), + (int) tb_den, (int) tb_num); + switch (mode->GetFieldDominance()) { + case bmdLowerFieldFirst: + av_log(avctx, AV_LOG_INFO, " (interlaced, lower field first)"); break; + case bmdUpperFieldFirst: + av_log(avctx, AV_LOG_INFO, " (interlaced, upper field first)"); break; + } + av_log(avctx, AV_LOG_INFO, "\n"); + mode->Release(); + } + + itermode->Release(); + + return 0; +} diff --git a/libavdevice/decklink_common.h b/libavdevice/decklink_common.h new file mode 100644 index 0000000..544da27 --- /dev/null +++ b/libavdevice/decklink_common.h @@ -0,0 +1,98 @@ +/* + * Blackmagic DeckLink common code + * Copyright (c) 2013-2014 Ramiro Polla, Luca Barbato, Deti Fliegl + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "decklink_common_c.h" + +class decklink_output_callback; +class decklink_input_callback; + +typedef struct AVPacketQueue { + AVPacketList *first_pkt, *last_pkt; + int nb_packets; + unsigned long long size; + int abort_request; + pthread_mutex_t mutex; + pthread_cond_t cond; + AVFormatContext *avctx; +} AVPacketQueue; + +struct decklink_ctx { + /* DeckLink SDK interfaces */ + IDeckLink *dl; + IDeckLinkOutput *dlo; + IDeckLinkInput *dli; + decklink_output_callback *output_callback; + decklink_input_callback *input_callback; + + /* DeckLink mode information */ + BMDTimeValue bmd_tb_den; + BMDTimeValue bmd_tb_num; + BMDDisplayMode bmd_mode; + int bmd_width; + int bmd_height; + int bmd_field_dominance; + + /* Capture buffer queue */ + AVPacketQueue queue; + + /* Streams present */ + int audio; + int video; + + /* Status */ + int playback_started; + int capture_started; + int64_t last_pts; + unsigned long frameCount; + unsigned int dropped; + AVStream *audio_st; + AVStream *video_st; + + /* Options */ + int list_devices; + int list_formats; + double preroll; + + int frames_preroll; + int frames_buffer; + + sem_t semaphore; + + int channels; +}; + +typedef enum { DIRECTION_IN, DIRECTION_OUT} decklink_direction_t; + +#ifdef _WIN32 +typedef unsigned long buffercount_type; +IDeckLinkIterator *CreateDeckLinkIteratorInstance(void); +char *dup_wchar_to_utf8(wchar_t *w); +#else +typedef uint32_t buffercount_type; +#endif + + +HRESULT ff_decklink_get_display_name(IDeckLink *This, const char **displayName); +int ff_decklink_set_format(AVFormatContext *avctx, int width, int height, int tb_num, int tb_den, decklink_direction_t direction = DIRECTION_OUT, int num = 0); +int ff_decklink_set_format(AVFormatContext *avctx, decklink_direction_t direction, 
int num); +int ff_decklink_list_devices(AVFormatContext *avctx); +int ff_decklink_list_formats(AVFormatContext *avctx, decklink_direction_t direction = DIRECTION_OUT); + diff --git a/libavdevice/decklink_common_c.h b/libavdevice/decklink_common_c.h new file mode 100644 index 0000000..861a51a --- /dev/null +++ b/libavdevice/decklink_common_c.h @@ -0,0 +1,32 @@ +/* + * Blackmagic DeckLink common code + * Copyright (c) 2013-2014 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +struct decklink_cctx { + const AVClass *cclass; + + void *ctx; + + /* Options */ + int list_devices; + int list_formats; + double preroll; +}; + diff --git a/libavdevice/decklink_dec.cpp b/libavdevice/decklink_dec.cpp new file mode 100644 index 0000000..77a0fe5 --- /dev/null +++ b/libavdevice/decklink_dec.cpp @@ -0,0 +1,531 @@ +/* + * Blackmagic DeckLink output + * Copyright (c) 2013-2014 Luca Barbato, Deti Fliegl + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <DeckLinkAPI.h> + +#include <pthread.h> +#include <semaphore.h> + +extern "C" { +#include "libavformat/avformat.h" +#include "libavformat/internal.h" +#include "libavutil/imgutils.h" +} + +#include "decklink_common.h" +#include "decklink_dec.h" + +static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q) +{ + memset(q, 0, sizeof(AVPacketQueue)); + pthread_mutex_init(&q->mutex, NULL); + pthread_cond_init(&q->cond, NULL); + q->avctx = avctx; +} + +static void avpacket_queue_flush(AVPacketQueue *q) +{ + AVPacketList *pkt, *pkt1; + + pthread_mutex_lock(&q->mutex); + for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) { + pkt1 = pkt->next; + av_free_packet(&pkt->pkt); + av_freep(&pkt); + } + q->last_pkt = NULL; + q->first_pkt = NULL; + q->nb_packets = 0; + q->size = 0; + pthread_mutex_unlock(&q->mutex); +} + +static void avpacket_queue_end(AVPacketQueue *q) +{ + avpacket_queue_flush(q); + pthread_mutex_destroy(&q->mutex); + pthread_cond_destroy(&q->cond); +} + +static unsigned long long avpacket_queue_size(AVPacketQueue *q) +{ + unsigned long long size; + pthread_mutex_lock(&q->mutex); + size = q->size; + pthread_mutex_unlock(&q->mutex); + return size; +} + +static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt) +{ + AVPacketList *pkt1; + + // Drop Packet if queue size is > 1GB + if (avpacket_queue_size(q) > 1024 * 1024 * 1024 ) { + av_log(q->avctx, AV_LOG_WARNING, "Decklink input buffer overrun!\n"); + return -1; + } + /* duplicate the packet */ + if 
(av_dup_packet(pkt) < 0) { + return -1; + } + + pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList)); + if (!pkt1) { + return -1; + } + pkt1->pkt = *pkt; + pkt1->next = NULL; + + pthread_mutex_lock(&q->mutex); + + if (!q->last_pkt) { + q->first_pkt = pkt1; + } else { + q->last_pkt->next = pkt1; + } + + q->last_pkt = pkt1; + q->nb_packets++; + q->size += pkt1->pkt.size + sizeof(*pkt1); + + pthread_cond_signal(&q->cond); + + pthread_mutex_unlock(&q->mutex); + return 0; +} + +static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block) +{ + AVPacketList *pkt1; + int ret; + + pthread_mutex_lock(&q->mutex); + + for (;; ) { + pkt1 = q->first_pkt; + if (pkt1) { + q->first_pkt = pkt1->next; + if (!q->first_pkt) { + q->last_pkt = NULL; + } + q->nb_packets--; + q->size -= pkt1->pkt.size + sizeof(*pkt1); + *pkt = pkt1->pkt; + av_free(pkt1); + ret = 1; + break; + } else if (!block) { + ret = 0; + break; + } else { + pthread_cond_wait(&q->cond, &q->mutex); + } + } + pthread_mutex_unlock(&q->mutex); + return ret; +} + +class decklink_input_callback : public IDeckLinkInputCallback +{ +public: + decklink_input_callback(AVFormatContext *_avctx); + ~decklink_input_callback(); + + virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; } + virtual ULONG STDMETHODCALLTYPE AddRef(void); + virtual ULONG STDMETHODCALLTYPE Release(void); + virtual HRESULT STDMETHODCALLTYPE VideoInputFormatChanged(BMDVideoInputFormatChangedEvents, IDeckLinkDisplayMode*, BMDDetectedVideoInputFormatFlags); + virtual HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(IDeckLinkVideoInputFrame*, IDeckLinkAudioInputPacket*); + +private: + ULONG m_refCount; + pthread_mutex_t m_mutex; + AVFormatContext *avctx; + decklink_ctx *ctx; + int no_video; + int64_t initial_video_pts; + int64_t initial_audio_pts; +}; + +decklink_input_callback::decklink_input_callback(AVFormatContext *_avctx) : m_refCount(0) +{ + avctx = _avctx; + decklink_cctx *cctx = (struct 
decklink_cctx *) avctx->priv_data; + ctx = (struct decklink_ctx *) cctx->ctx; + initial_audio_pts = initial_video_pts = AV_NOPTS_VALUE; + pthread_mutex_init(&m_mutex, NULL); +} + +decklink_input_callback::~decklink_input_callback() +{ + pthread_mutex_destroy(&m_mutex); +} + +ULONG decklink_input_callback::AddRef(void) +{ + pthread_mutex_lock(&m_mutex); + m_refCount++; + pthread_mutex_unlock(&m_mutex); + + return (ULONG)m_refCount; +} + +ULONG decklink_input_callback::Release(void) +{ + pthread_mutex_lock(&m_mutex); + m_refCount--; + pthread_mutex_unlock(&m_mutex); + + if (m_refCount == 0) { + delete this; + return 0; + } + + return (ULONG)m_refCount; +} + +HRESULT decklink_input_callback::VideoInputFrameArrived( + IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame) +{ + void *frameBytes; + void *audioFrameBytes; + BMDTimeValue frameTime; + BMDTimeValue frameDuration; + + ctx->frameCount++; + + // Handle Video Frame + if (videoFrame) { + AVPacket pkt; + AVCodecContext *c; + av_init_packet(&pkt); + c = ctx->video_st->codec; + if (ctx->frameCount % 25 == 0) { + unsigned long long qsize = avpacket_queue_size(&ctx->queue); + av_log(avctx, AV_LOG_DEBUG, + "Frame received (#%lu) - Valid (%liB) - QSize %fMB\n", + ctx->frameCount, + videoFrame->GetRowBytes() * videoFrame->GetHeight(), + (double)qsize / 1024 / 1024); + } + + videoFrame->GetBytes(&frameBytes); + videoFrame->GetStreamTime(&frameTime, &frameDuration, + ctx->video_st->time_base.den); + + if (videoFrame->GetFlags() & bmdFrameHasNoInputSource) { + unsigned bars[8] = { + 0xEA80EA80, 0xD292D210, 0xA910A9A5, 0x90229035, + 0x6ADD6ACA, 0x51EF515A, 0x286D28EF, 0x10801080 }; + int width = videoFrame->GetWidth(); + int height = videoFrame->GetHeight(); + unsigned *p = (unsigned *)frameBytes; + + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x += 2) + *p++ = bars[(x * 8) / width]; + } + + if (!no_video) { + av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - No input signal 
detected " + "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped); + } + no_video = 1; + } else { + if (no_video) { + av_log(avctx, AV_LOG_WARNING, "Frame received (#%lu) - Input returned " + "- Frames dropped %u\n", ctx->frameCount, ++ctx->dropped); + } + no_video = 0; + } + + pkt.pts = frameTime / ctx->video_st->time_base.num; + + if (initial_video_pts == AV_NOPTS_VALUE) { + initial_video_pts = pkt.pts; + } + + pkt.pts -= initial_video_pts; + pkt.dts = pkt.pts; + + pkt.duration = frameDuration; + //To be made sure it still applies + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.stream_index = ctx->video_st->index; + pkt.data = (uint8_t *)frameBytes; + pkt.size = videoFrame->GetRowBytes() * + videoFrame->GetHeight(); + //fprintf(stderr,"Video Frame size %d ts %d\n", pkt.size, pkt.pts); + c->frame_number++; + if (avpacket_queue_put(&ctx->queue, &pkt) < 0) { + ++ctx->dropped; + } + } + + // Handle Audio Frame + if (audioFrame) { + AVCodecContext *c; + AVPacket pkt; + BMDTimeValue audio_pts; + av_init_packet(&pkt); + + c = ctx->audio_st->codec; + //hack among hacks + pkt.size = audioFrame->GetSampleFrameCount() * ctx->audio_st->codec->channels * (16 / 8); + audioFrame->GetBytes(&audioFrameBytes); + audioFrame->GetPacketTime(&audio_pts, ctx->audio_st->time_base.den); + pkt.pts = audio_pts / ctx->audio_st->time_base.num; + + if (initial_audio_pts == AV_NOPTS_VALUE) { + initial_audio_pts = pkt.pts; + } + + pkt.pts -= initial_audio_pts; + pkt.dts = pkt.pts; + + //fprintf(stderr,"Audio Frame size %d ts %d\n", pkt.size, pkt.pts); + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.stream_index = ctx->audio_st->index; + pkt.data = (uint8_t *)audioFrameBytes; + + c->frame_number++; + if (avpacket_queue_put(&ctx->queue, &pkt) < 0) { + ++ctx->dropped; + } + } + + return S_OK; +} + +HRESULT decklink_input_callback::VideoInputFormatChanged( + BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode *mode, + BMDDetectedVideoInputFormatFlags) +{ + return S_OK; +} + +static HRESULT 
decklink_start_input(AVFormatContext *avctx) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + + ctx->input_callback = new decklink_input_callback(avctx); + ctx->dli->SetCallback(ctx->input_callback); + return ctx->dli->StartStreams(); +} + +extern "C" { + +av_cold int ff_decklink_read_close(AVFormatContext *avctx) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + + if (ctx->capture_started) { + ctx->dli->StopStreams(); + ctx->dli->DisableVideoInput(); + ctx->dli->DisableAudioInput(); + } + + if (ctx->dli) + ctx->dli->Release(); + if (ctx->dl) + ctx->dl->Release(); + + avpacket_queue_end(&ctx->queue); + + av_freep(&cctx->ctx); + + return 0; +} + +av_cold int ff_decklink_read_header(AVFormatContext *avctx) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx; + IDeckLinkDisplayModeIterator *itermode; + IDeckLinkIterator *iter; + IDeckLink *dl = NULL; + AVStream *st; + HRESULT result; + char fname[1024]; + char *tmp; + int mode_num = 0; + + ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx)); + if (!ctx) + return AVERROR(ENOMEM); + ctx->list_devices = cctx->list_devices; + ctx->list_formats = cctx->list_formats; + ctx->preroll = cctx->preroll; + cctx->ctx = ctx; + + iter = CreateDeckLinkIteratorInstance(); + if (!iter) { + av_log(avctx, AV_LOG_ERROR, "Could not create DeckLink iterator\n"); + return AVERROR(EIO); + } + + /* List available devices. */ + if (ctx->list_devices) { + ff_decklink_list_devices(avctx); + return AVERROR_EXIT; + } + + strcpy (fname, avctx->filename); + tmp=strchr (fname, '@'); + if (tmp != NULL) { + mode_num = atoi (tmp+1); + *tmp = 0; + } + + /* Open device. 
*/ + while (iter->Next(&dl) == S_OK) { + const char *displayName; + ff_decklink_get_display_name(dl, &displayName); + if (!strcmp(fname, displayName)) { + av_free((void *) displayName); + ctx->dl = dl; + break; + } + av_free((void *) displayName); + dl->Release(); + } + iter->Release(); + if (!ctx->dl) { + av_log(avctx, AV_LOG_ERROR, "Could not open '%s'\n", fname); + return AVERROR(EIO); + } + + /* Get input device. */ + if (ctx->dl->QueryInterface(IID_IDeckLinkInput, (void **) &ctx->dli) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n", + avctx->filename); + ctx->dl->Release(); + return AVERROR(EIO); + } + + /* List supported formats. */ + if (ctx->list_formats) { + ff_decklink_list_formats(avctx, DIRECTION_IN); + ctx->dli->Release(); + ctx->dl->Release(); + return AVERROR_EXIT; + } + + if (ctx->dli->GetDisplayModeIterator(&itermode) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get Display Mode Iterator\n"); + ctx->dl->Release(); + return AVERROR(EIO); + } + + if (mode_num > 0) { + if (ff_decklink_set_format(avctx, DIRECTION_IN, mode_num) < 0) { + av_log(avctx, AV_LOG_ERROR, "Could not set mode %d for %s\n", mode_num, fname); + goto error; + } + } + + itermode->Release(); + + /* Setup streams. 
*/ + st = avformat_new_stream(avctx, NULL); + if (!st) { + av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n"); + goto error; + } + st->codec->codec_type = AVMEDIA_TYPE_AUDIO; + st->codec->codec_id = AV_CODEC_ID_PCM_S16LE; + st->codec->sample_rate = bmdAudioSampleRate48kHz; + st->codec->channels = 2; + avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + ctx->audio_st=st; + + st = avformat_new_stream(avctx, NULL); + if (!st) { + av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n"); + goto error; + } + st->codec->codec_type = AVMEDIA_TYPE_VIDEO; + st->codec->codec_id = AV_CODEC_ID_RAWVIDEO; + st->codec->width = ctx->bmd_width; + st->codec->height = ctx->bmd_height; + + st->codec->pix_fmt = AV_PIX_FMT_UYVY422; + st->codec->time_base.den = ctx->bmd_tb_den; + st->codec->time_base.num = ctx->bmd_tb_num; + st->codec->bit_rate = avpicture_get_size(st->codec->pix_fmt, ctx->bmd_width, ctx->bmd_height) * 1/av_q2d(st->codec->time_base) * 8; + st->codec->codec_tag = MKTAG('U', 'Y', 'V', 'Y'); + + avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + + ctx->video_st=st; + + result = ctx->dli->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger, 2); + + if (result != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Cannot enable audio input\n"); + goto error; + } + + result = ctx->dli->EnableVideoInput(ctx->bmd_mode, bmdFormat8BitYUV, bmdVideoInputFlagDefault); + + if (result != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Cannot enable video input\n"); + goto error; + } + + avpacket_queue_init (avctx, &ctx->queue); + + if (decklink_start_input (avctx) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Cannot start input stream\n"); + goto error; + } + + return 0; + +error: + + ctx->dli->Release(); + ctx->dl->Release(); + + return AVERROR(EIO); +} + +int ff_decklink_read_packet(AVFormatContext *avctx, AVPacket *pkt) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + 
AVFrame *frame = ctx->video_st->codec->coded_frame; + + avpacket_queue_get(&ctx->queue, pkt, 1); + if (frame && (ctx->bmd_field_dominance == bmdUpperFieldFirst || ctx->bmd_field_dominance == bmdLowerFieldFirst)) { + frame->interlaced_frame = 1; + if (ctx->bmd_field_dominance == bmdUpperFieldFirst) { + frame->top_field_first = 1; + } + } + + return 0; +} + +} /* extern "C" */ diff --git a/libavdevice/decklink_dec.h b/libavdevice/decklink_dec.h new file mode 100644 index 0000000..6bd9226 --- /dev/null +++ b/libavdevice/decklink_dec.h @@ -0,0 +1,32 @@ +/* + * Blackmagic DeckLink output + * Copyright (c) 2013-2014 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifdef __cplusplus +extern "C" { +#endif + +int ff_decklink_read_header(AVFormatContext *avctx); +int ff_decklink_read_packet(AVFormatContext *avctx, AVPacket *pkt); +int ff_decklink_read_close(AVFormatContext *avctx); + +#ifdef __cplusplus +} /* extern "C" */ +#endif diff --git a/libavdevice/decklink_dec_c.c b/libavdevice/decklink_dec_c.c new file mode 100644 index 0000000..2aea277 --- /dev/null +++ b/libavdevice/decklink_dec_c.c @@ -0,0 +1,54 @@ +/* + * Blackmagic DeckLink output + * Copyright (c) 2014 Deti Fliegl + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "libavformat/avformat.h" +#include "libavutil/opt.h" + +#include "decklink_common_c.h" +#include "decklink_dec.h" + +#define OFFSET(x) offsetof(struct decklink_cctx, x) +#define DEC AV_OPT_FLAG_DECODING_PARAM + +static const AVOption options[] = { + { "list_devices", "list available devices" , OFFSET(list_devices), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, DEC }, + { "list_formats", "list supported formats" , OFFSET(list_formats), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, DEC }, + { NULL }, +}; + +static const AVClass decklink_demuxer_class = { + .class_name = "Blackmagic DeckLink demuxer", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, +}; + +AVInputFormat ff_decklink_demuxer = { + .name = "decklink", + .long_name = NULL_IF_CONFIG_SMALL("Blackmagic DeckLink input"), + .flags = AVFMT_NOFILE | AVFMT_RAWPICTURE, + .priv_class = &decklink_demuxer_class, + .priv_data_size = sizeof(struct decklink_cctx), + .read_header = ff_decklink_read_header, + .read_packet = ff_decklink_read_packet, + .read_close = ff_decklink_read_close, +}; diff --git a/libavdevice/decklink_enc.cpp b/libavdevice/decklink_enc.cpp new file mode 100644 index 
0000000..6c5450f --- /dev/null +++ b/libavdevice/decklink_enc.cpp @@ -0,0 +1,426 @@ +/* + * Blackmagic DeckLink output + * Copyright (c) 2013-2014 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <DeckLinkAPI.h> + +#include <pthread.h> +#include <semaphore.h> + +extern "C" { +#include "libavformat/avformat.h" +#include "libavformat/internal.h" +#include "libavutil/imgutils.h" +} + +#include "decklink_common.h" +#include "decklink_enc.h" + + +/* DeckLink callback class declaration */ +class decklink_frame : public IDeckLinkVideoFrame +{ +public: + decklink_frame(struct decklink_ctx *ctx, AVFrame *avframe, long width, + long height, void *buffer) : + _ctx(ctx), _avframe(avframe), _width(width), + _height(height), _buffer(buffer), _refs(0) { } + + virtual long STDMETHODCALLTYPE GetWidth (void) { return _width; } + virtual long STDMETHODCALLTYPE GetHeight (void) { return _height; } + virtual long STDMETHODCALLTYPE GetRowBytes (void) { return _width<<1; } + virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat(void) { return bmdFormat8BitYUV; } + virtual BMDFrameFlags STDMETHODCALLTYPE GetFlags (void) { return bmdVideoOutputFlagDefault; } + virtual HRESULT STDMETHODCALLTYPE GetBytes (void **buffer) { *buffer = _buffer; return 
S_OK; } + + virtual HRESULT STDMETHODCALLTYPE GetTimecode (BMDTimecodeFormat format, IDeckLinkTimecode **timecode) { return S_FALSE; } + virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary **ancillary) { return S_FALSE; } + + virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; } + virtual ULONG STDMETHODCALLTYPE AddRef(void) { return ++_refs; } + virtual ULONG STDMETHODCALLTYPE Release(void) { if (!--_refs) delete this; return _refs; } + + struct decklink_ctx *_ctx; + AVFrame *_avframe; + +private: + long _width; + long _height; + void *_buffer; + int _refs; +}; + +class decklink_output_callback : public IDeckLinkVideoOutputCallback +{ +public: + virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame *_frame, BMDOutputFrameCompletionResult result) + { + decklink_frame *frame = static_cast<decklink_frame *>(_frame); + struct decklink_ctx *ctx = frame->_ctx; + AVFrame *avframe = frame->_avframe; + + av_frame_free(&avframe); + + sem_post(&ctx->semaphore); + + return S_OK; + } + virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped(void) { return S_OK; } + virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; } + virtual ULONG STDMETHODCALLTYPE AddRef(void) { return 1; } + virtual ULONG STDMETHODCALLTYPE Release(void) { return 1; } +}; + +static int decklink_setup_video(AVFormatContext *avctx, AVStream *st) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + AVCodecContext *c = st->codec; + + if (ctx->video) { + av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n"); + return -1; + } + + if (c->pix_fmt != AV_PIX_FMT_UYVY422) { + av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!" 
+ " Only AV_PIX_FMT_UYVY422 is supported.\n"); + return -1; + } + if (ff_decklink_set_format(avctx, c->width, c->height, + c->time_base.num, c->time_base.den)) { + av_log(avctx, AV_LOG_ERROR, "Unsupported video size or framerate!" + " Check available formats with -list_formats 1.\n"); + return -1; + } + if (ctx->dlo->EnableVideoOutput(ctx->bmd_mode, + bmdVideoOutputFlagDefault) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not enable video output!\n"); + return -1; + } + + /* Set callback. */ + ctx->output_callback = new decklink_output_callback(); + ctx->dlo->SetScheduledFrameCompletionCallback(ctx->output_callback); + + /* Start video semaphore. */ + ctx->frames_preroll = c->time_base.den * ctx->preroll; + if (c->time_base.den > 1000) + ctx->frames_preroll /= 1000; + + /* Buffer twice as many frames as the preroll. */ + ctx->frames_buffer = ctx->frames_preroll * 2; + ctx->frames_buffer = FFMIN(ctx->frames_buffer, 60); + sem_init(&ctx->semaphore, 0, ctx->frames_buffer); + + /* The device expects the framerate to be fixed. */ + avpriv_set_pts_info(st, 64, c->time_base.num, c->time_base.den); + + ctx->video = 1; + + return 0; +} + +static int decklink_setup_audio(AVFormatContext *avctx, AVStream *st) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + AVCodecContext *c = st->codec; + + if (ctx->audio) { + av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n"); + return -1; + } + if (c->sample_rate != 48000) { + av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!" + " Only 48kHz is supported.\n"); + return -1; + } + if (c->channels != 2 && c->channels != 8) { + av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!" 
+ " Only stereo and 7.1 are supported.\n"); + return -1; + } + if (ctx->dlo->EnableAudioOutput(bmdAudioSampleRate48kHz, + bmdAudioSampleType16bitInteger, + c->channels, + bmdAudioOutputStreamTimestamped) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not enable audio output!\n"); + return -1; + } + if (ctx->dlo->BeginAudioPreroll() != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not begin audio preroll!\n"); + return -1; + } + + /* The device expects the sample rate to be fixed. */ + avpriv_set_pts_info(st, 64, 1, c->sample_rate); + ctx->channels = c->channels; + + ctx->audio = 1; + + return 0; +} + +av_cold int ff_decklink_write_trailer(AVFormatContext *avctx) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + + if (ctx->playback_started) { + BMDTimeValue actual; + ctx->dlo->StopScheduledPlayback(ctx->last_pts * ctx->bmd_tb_num, + &actual, ctx->bmd_tb_den); + ctx->dlo->DisableVideoOutput(); + if (ctx->audio) + ctx->dlo->DisableAudioOutput(); + } + + if (ctx->dlo) + ctx->dlo->Release(); + if (ctx->dl) + ctx->dl->Release(); + + if (ctx->output_callback) + delete ctx->output_callback; + + sem_destroy(&ctx->semaphore); + + av_freep(&cctx->ctx); + + return 0; +} + +static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + AVPicture *avpicture = (AVPicture *) pkt->data; + AVFrame *avframe, *tmp; + decklink_frame *frame; + buffercount_type buffered; + HRESULT hr; + + /* HACK while av_uncoded_frame() isn't implemented */ + int ret; + + tmp = av_frame_alloc(); + if (!tmp) + return AVERROR(ENOMEM); + tmp->format = AV_PIX_FMT_UYVY422; + tmp->width = ctx->bmd_width; + tmp->height = ctx->bmd_height; + ret = av_frame_get_buffer(tmp, 32); + if (ret < 0) { + av_frame_free(&tmp); + return ret; + } + av_image_copy(tmp->data, 
tmp->linesize, (const uint8_t **) avpicture->data, + avpicture->linesize, (AVPixelFormat) tmp->format, tmp->width, + tmp->height); + avframe = av_frame_clone(tmp); + av_frame_free(&tmp); + if (!avframe) { + av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n"); + return AVERROR(EIO); + } + /* end HACK */ + + frame = new decklink_frame(ctx, avframe, ctx->bmd_width, ctx->bmd_height, + (void *) avframe->data[0]); + if (!frame) { + av_log(avctx, AV_LOG_ERROR, "Could not create new frame.\n"); + return AVERROR(EIO); + } + + /* Always keep at most one second of frames buffered. */ + sem_wait(&ctx->semaphore); + + /* Schedule frame for playback. */ + hr = ctx->dlo->ScheduleVideoFrame((struct IDeckLinkVideoFrame *) frame, + pkt->pts * ctx->bmd_tb_num, + ctx->bmd_tb_num, ctx->bmd_tb_den); + if (hr != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not schedule video frame." + " error %08x.\n", (uint32_t) hr); + frame->Release(); + return AVERROR(EIO); + } + + ctx->dlo->GetBufferedVideoFrameCount(&buffered); + av_log(avctx, AV_LOG_DEBUG, "Buffered video frames: %d.\n", (int) buffered); + if (pkt->pts > 2 && buffered <= 2) + av_log(avctx, AV_LOG_WARNING, "There are not enough buffered video frames." + " Video may misbehave!\n"); + + /* Preroll video frames. 
*/ + if (!ctx->playback_started && pkt->pts > ctx->frames_preroll) { + av_log(avctx, AV_LOG_DEBUG, "Ending audio preroll.\n"); + if (ctx->audio && ctx->dlo->EndAudioPreroll() != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not end audio preroll!\n"); + return AVERROR(EIO); + } + av_log(avctx, AV_LOG_DEBUG, "Starting scheduled playback.\n"); + if (ctx->dlo->StartScheduledPlayback(0, ctx->bmd_tb_den, 1.0) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not start scheduled playback!\n"); + return AVERROR(EIO); + } + ctx->playback_started = 1; + } + + return 0; +} + +static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + int sample_count = pkt->size / (ctx->channels << 1); + buffercount_type buffered; + + ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered); + if (pkt->pts > 1 && !buffered) + av_log(avctx, AV_LOG_WARNING, "There's no buffered audio." 
+ " Audio will misbehave!\n"); + + if (ctx->dlo->ScheduleAudioSamples(pkt->data, sample_count, pkt->pts, + bmdAudioSampleRate48kHz, NULL) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not schedule audio samples.\n"); + return AVERROR(EIO); + } + + return 0; +} + +extern "C" { + +av_cold int ff_decklink_write_header(AVFormatContext *avctx) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx; + IDeckLinkDisplayModeIterator *itermode; + IDeckLinkIterator *iter; + IDeckLink *dl = NULL; + unsigned int n; + + ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx)); + if (!ctx) + return AVERROR(ENOMEM); + ctx->list_devices = cctx->list_devices; + ctx->list_formats = cctx->list_formats; + ctx->preroll = cctx->preroll; + cctx->ctx = ctx; + + iter = CreateDeckLinkIteratorInstance(); + if (!iter) { + av_log(avctx, AV_LOG_ERROR, "Could not create DeckLink iterator\n"); + return AVERROR(EIO); + } + + /* List available devices. */ + if (ctx->list_devices) { + ff_decklink_list_devices(avctx); + return AVERROR_EXIT; + } + + /* Open device. */ + while (iter->Next(&dl) == S_OK) { + const char *displayName; + ff_decklink_get_display_name(dl, &displayName); + if (!strcmp(avctx->filename, displayName)) { + av_free((void *) displayName); + ctx->dl = dl; + break; + } + av_free((void *) displayName); + dl->Release(); + } + iter->Release(); + if (!ctx->dl) { + av_log(avctx, AV_LOG_ERROR, "Could not open '%s'\n", avctx->filename); + return AVERROR(EIO); + } + + /* Get output device. */ + if (ctx->dl->QueryInterface(IID_IDeckLinkOutput, (void **) &ctx->dlo) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n", + avctx->filename); + ctx->dl->Release(); + return AVERROR(EIO); + } + + /* List supported formats. 
*/ + if (ctx->list_formats) { + ff_decklink_list_formats(avctx); + ctx->dlo->Release(); + ctx->dl->Release(); + return AVERROR_EXIT; + } + + if (ctx->dlo->GetDisplayModeIterator(&itermode) != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get Display Mode Iterator\n"); + ctx->dl->Release(); + return AVERROR(EIO); + } + + /* Setup streams. */ + for (n = 0; n < avctx->nb_streams; n++) { + AVStream *st = avctx->streams[n]; + AVCodecContext *c = st->codec; + if (c->codec_type == AVMEDIA_TYPE_AUDIO) { + if (decklink_setup_audio(avctx, st)) + goto error; + } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) { + if (decklink_setup_video(avctx, st)) + goto error; + } else { + av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n"); + goto error; + } + } + itermode->Release(); + + return 0; + +error: + + ctx->dlo->Release(); + ctx->dl->Release(); + + return AVERROR(EIO); +} + +int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt) +{ + struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data; + struct decklink_ctx *ctx = (struct decklink_ctx *) cctx->ctx; + AVStream *st = avctx->streams[pkt->stream_index]; + + ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts); + + if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) + return decklink_write_video_packet(avctx, pkt); + else if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) + return decklink_write_audio_packet(avctx, pkt); + + return AVERROR(EIO); +} + +} /* extern "C" */ diff --git a/libavdevice/decklink_enc.h b/libavdevice/decklink_enc.h new file mode 100644 index 0000000..6086947 --- /dev/null +++ b/libavdevice/decklink_enc.h @@ -0,0 +1,32 @@ +/* + * Blackmagic DeckLink output + * Copyright (c) 2013-2014 Ramiro Polla + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifdef __cplusplus +extern "C" { +#endif + +int ff_decklink_write_header(AVFormatContext *avctx); +int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt); +int ff_decklink_write_trailer(AVFormatContext *avctx); + +#ifdef __cplusplus +} /* extern "C" */ +#endif diff --git a/libavdevice/decklink_enc_c.c b/libavdevice/decklink_enc_c.c new file mode 100644 index 0000000..c3c9018 --- /dev/null +++ b/libavdevice/decklink_enc_c.c @@ -0,0 +1,57 @@ +/* + * Blackmagic DeckLink output + * Copyright (c) 2013-2014 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "libavformat/avformat.h" +#include "libavutil/opt.h" + +#include "decklink_common_c.h" +#include "decklink_enc.h" + +#define OFFSET(x) offsetof(struct decklink_cctx, x) +#define ENC AV_OPT_FLAG_ENCODING_PARAM +static const AVOption options[] = { + { "list_devices", "list available devices" , OFFSET(list_devices), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, ENC }, + { "list_formats", "list supported formats" , OFFSET(list_formats), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, ENC }, + { "preroll" , "video preroll in seconds", OFFSET(preroll ), AV_OPT_TYPE_DOUBLE, { .dbl = 0.5 }, 0, 5, ENC }, + { NULL }, +}; + +static const AVClass decklink_muxer_class = { + .class_name = "Blackmagic DeckLink muxer", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_decklink_muxer = { + .name = "decklink", + .long_name = NULL_IF_CONFIG_SMALL("Blackmagic DeckLink output"), + .audio_codec = AV_CODEC_ID_PCM_S16LE, + .video_codec = AV_CODEC_ID_RAWVIDEO, + .subtitle_codec = AV_CODEC_ID_NONE, + .flags = AVFMT_NOFILE | AVFMT_RAWPICTURE, + .priv_class = &decklink_muxer_class, + .priv_data_size = sizeof(struct decklink_cctx), + .write_header = ff_decklink_write_header, + .write_packet = ff_decklink_write_packet, + .write_trailer = ff_decklink_write_trailer, +}; diff --git a/libavdevice/dshow.c b/libavdevice/dshow.c new file mode 100644 index 0000000..a543249 --- /dev/null +++ b/libavdevice/dshow.c @@ -0,0 +1,1100 @@ +/* + * Directshow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "libavutil/parseutils.h" +#include "libavutil/pixdesc.h" +#include "libavutil/opt.h" +#include "libavformat/internal.h" +#include "libavformat/riff.h" +#include "avdevice.h" +#include "dshow_capture.h" +#include "libavcodec/raw.h" + +struct dshow_ctx { + const AVClass *class; + + IGraphBuilder *graph; + + char *device_name[2]; + int video_device_number; + int audio_device_number; + + int list_options; + int list_devices; + int audio_buffer_size; + + IBaseFilter *device_filter[2]; + IPin *device_pin[2]; + libAVFilter *capture_filter[2]; + libAVPin *capture_pin[2]; + + HANDLE mutex; + HANDLE event[2]; /* event[0] is set by DirectShow + * event[1] is set by callback() */ + AVPacketList *pktl; + + int eof; + + int64_t curbufsize[2]; + unsigned int video_frame_num; + + IMediaControl *control; + IMediaEvent *media_event; + + enum AVPixelFormat pixel_format; + enum AVCodecID video_codec_id; + char *framerate; + + int requested_width; + int requested_height; + AVRational requested_framerate; + + int sample_rate; + int sample_size; + int channels; +}; + +static enum AVPixelFormat dshow_pixfmt(DWORD biCompression, WORD biBitCount) +{ + switch(biCompression) { + case BI_BITFIELDS: + case BI_RGB: + switch(biBitCount) { /* 1-8 are 
untested */ + case 1: + return AV_PIX_FMT_MONOWHITE; + case 4: + return AV_PIX_FMT_RGB4; + case 8: + return AV_PIX_FMT_RGB8; + case 16: + return AV_PIX_FMT_RGB555; + case 24: + return AV_PIX_FMT_BGR24; + case 32: + return AV_PIX_FMT_0RGB32; + } + } + return avpriv_find_pix_fmt(avpriv_get_raw_pix_fmt_tags(), biCompression); // all others +} + +static int +dshow_read_close(AVFormatContext *s) +{ + struct dshow_ctx *ctx = s->priv_data; + AVPacketList *pktl; + + if (ctx->control) { + IMediaControl_Stop(ctx->control); + IMediaControl_Release(ctx->control); + } + + if (ctx->media_event) + IMediaEvent_Release(ctx->media_event); + + if (ctx->graph) { + IEnumFilters *fenum; + int r; + r = IGraphBuilder_EnumFilters(ctx->graph, &fenum); + if (r == S_OK) { + IBaseFilter *f; + IEnumFilters_Reset(fenum); + while (IEnumFilters_Next(fenum, 1, &f, NULL) == S_OK) { + if (IGraphBuilder_RemoveFilter(ctx->graph, f) == S_OK) + IEnumFilters_Reset(fenum); /* When a filter is removed, + * the list must be reset. 
*/ + IBaseFilter_Release(f); + } + IEnumFilters_Release(fenum); + } + IGraphBuilder_Release(ctx->graph); + } + + if (ctx->capture_pin[VideoDevice]) + libAVPin_Release(ctx->capture_pin[VideoDevice]); + if (ctx->capture_pin[AudioDevice]) + libAVPin_Release(ctx->capture_pin[AudioDevice]); + if (ctx->capture_filter[VideoDevice]) + libAVFilter_Release(ctx->capture_filter[VideoDevice]); + if (ctx->capture_filter[AudioDevice]) + libAVFilter_Release(ctx->capture_filter[AudioDevice]); + + if (ctx->device_pin[VideoDevice]) + IPin_Release(ctx->device_pin[VideoDevice]); + if (ctx->device_pin[AudioDevice]) + IPin_Release(ctx->device_pin[AudioDevice]); + if (ctx->device_filter[VideoDevice]) + IBaseFilter_Release(ctx->device_filter[VideoDevice]); + if (ctx->device_filter[AudioDevice]) + IBaseFilter_Release(ctx->device_filter[AudioDevice]); + + if (ctx->device_name[0]) + av_free(ctx->device_name[0]); + if (ctx->device_name[1]) + av_free(ctx->device_name[1]); + + if(ctx->mutex) + CloseHandle(ctx->mutex); + if(ctx->event[0]) + CloseHandle(ctx->event[0]); + if(ctx->event[1]) + CloseHandle(ctx->event[1]); + + pktl = ctx->pktl; + while (pktl) { + AVPacketList *next = pktl->next; + av_destruct_packet(&pktl->pkt); + av_free(pktl); + pktl = next; + } + + CoUninitialize(); + + return 0; +} + +static char *dup_wchar_to_utf8(wchar_t *w) +{ + char *s = NULL; + int l = WideCharToMultiByte(CP_UTF8, 0, w, -1, 0, 0, 0, 0); + s = av_malloc(l); + if (s) + WideCharToMultiByte(CP_UTF8, 0, w, -1, s, l, 0, 0); + return s; +} + +static int shall_we_drop(AVFormatContext *s, int index, enum dshowDeviceType devtype) +{ + struct dshow_ctx *ctx = s->priv_data; + static const uint8_t dropscore[] = {62, 75, 87, 100}; + const int ndropscores = FF_ARRAY_ELEMS(dropscore); + unsigned int buffer_fullness = (ctx->curbufsize[index]*100)/s->max_picture_buffer; + + if(dropscore[++ctx->video_frame_num%ndropscores] <= buffer_fullness) { + av_log(s, AV_LOG_ERROR, + "real-time buffer[%s] too full (%d%% of size: %d)! 
frame dropped!\n", ctx->device_name[devtype], buffer_fullness, s->max_picture_buffer); + return 1; + } + + return 0; +} + +static void +callback(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, enum dshowDeviceType devtype) +{ + AVFormatContext *s = priv_data; + struct dshow_ctx *ctx = s->priv_data; + AVPacketList **ppktl, *pktl_next; + +// dump_videohdr(s, vdhdr); + + WaitForSingleObject(ctx->mutex, INFINITE); + + if(shall_we_drop(s, index, devtype)) + goto fail; + + pktl_next = av_mallocz(sizeof(AVPacketList)); + if(!pktl_next) + goto fail; + + if(av_new_packet(&pktl_next->pkt, buf_size) < 0) { + av_free(pktl_next); + goto fail; + } + + pktl_next->pkt.stream_index = index; + pktl_next->pkt.pts = time; + memcpy(pktl_next->pkt.data, buf, buf_size); + + for(ppktl = &ctx->pktl ; *ppktl ; ppktl = &(*ppktl)->next); + *ppktl = pktl_next; + ctx->curbufsize[index] += buf_size; + + SetEvent(ctx->event[1]); + ReleaseMutex(ctx->mutex); + + return; +fail: + ReleaseMutex(ctx->mutex); + return; +} + +/** + * Cycle through available devices using the device enumerator devenum, + * retrieve the device with type specified by devtype and return the + * pointer to the object found in *pfilter. + * If pfilter is NULL, list all device names. + */ +static int +dshow_cycle_devices(AVFormatContext *avctx, ICreateDevEnum *devenum, + enum dshowDeviceType devtype, IBaseFilter **pfilter) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IBaseFilter *device_filter = NULL; + IEnumMoniker *classenum = NULL; + IMoniker *m = NULL; + const char *device_name = ctx->device_name[devtype]; + int skip = (devtype == VideoDevice) ? ctx->video_device_number + : ctx->audio_device_number; + int r; + + const GUID *device_guid[2] = { &CLSID_VideoInputDeviceCategory, + &CLSID_AudioInputDeviceCategory }; + const char *devtypename = (devtype == VideoDevice) ? 
"video" : "audio"; + + r = ICreateDevEnum_CreateClassEnumerator(devenum, device_guid[devtype], + (IEnumMoniker **) &classenum, 0); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not enumerate %s devices.\n", + devtypename); + return AVERROR(EIO); + } + + while (!device_filter && IEnumMoniker_Next(classenum, 1, &m, NULL) == S_OK) { + IPropertyBag *bag = NULL; + char *buf = NULL; + VARIANT var; + + r = IMoniker_BindToStorage(m, 0, 0, &IID_IPropertyBag, (void *) &bag); + if (r != S_OK) + goto fail1; + + var.vt = VT_BSTR; + r = IPropertyBag_Read(bag, L"FriendlyName", &var, NULL); + if (r != S_OK) + goto fail1; + + buf = dup_wchar_to_utf8(var.bstrVal); + + if (pfilter) { + if (strcmp(device_name, buf)) + goto fail1; + + if (!skip--) + IMoniker_BindToObject(m, 0, 0, &IID_IBaseFilter, (void *) &device_filter); + } else { + av_log(avctx, AV_LOG_INFO, " \"%s\"\n", buf); + } + +fail1: + if (buf) + av_free(buf); + if (bag) + IPropertyBag_Release(bag); + IMoniker_Release(m); + } + + IEnumMoniker_Release(classenum); + + if (pfilter) { + if (!device_filter) { + av_log(avctx, AV_LOG_ERROR, "Could not find %s device.\n", + devtypename); + return AVERROR(EIO); + } + *pfilter = device_filter; + } + + return 0; +} + +/** + * Cycle through available formats using the specified pin, + * try to set parameters specified through AVOptions and if successful + * return 1 in *pformat_set. + * If pformat_set is NULL, list all pin capabilities. 
+ */ +static void +dshow_cycle_formats(AVFormatContext *avctx, enum dshowDeviceType devtype, + IPin *pin, int *pformat_set) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IAMStreamConfig *config = NULL; + AM_MEDIA_TYPE *type = NULL; + int format_set = 0; + void *caps = NULL; + int i, n, size; + + if (IPin_QueryInterface(pin, &IID_IAMStreamConfig, (void **) &config) != S_OK) + return; + if (IAMStreamConfig_GetNumberOfCapabilities(config, &n, &size) != S_OK) + goto end; + + caps = av_malloc(size); + if (!caps) + goto end; + + for (i = 0; i < n && !format_set; i++) { + IAMStreamConfig_GetStreamCaps(config, i, &type, (void *) caps); + +#if DSHOWDEBUG + ff_print_AM_MEDIA_TYPE(type); +#endif + + if (devtype == VideoDevice) { + VIDEO_STREAM_CONFIG_CAPS *vcaps = caps; + BITMAPINFOHEADER *bih; + int64_t *fr; + const AVCodecTag *const tags[] = { avformat_get_riff_video_tags(), NULL }; +#if DSHOWDEBUG + ff_print_VIDEO_STREAM_CONFIG_CAPS(vcaps); +#endif + if (IsEqualGUID(&type->formattype, &FORMAT_VideoInfo)) { + VIDEOINFOHEADER *v = (void *) type->pbFormat; + fr = &v->AvgTimePerFrame; + bih = &v->bmiHeader; + } else if (IsEqualGUID(&type->formattype, &FORMAT_VideoInfo2)) { + VIDEOINFOHEADER2 *v = (void *) type->pbFormat; + fr = &v->AvgTimePerFrame; + bih = &v->bmiHeader; + } else { + goto next; + } + if (!pformat_set) { + enum AVPixelFormat pix_fmt = dshow_pixfmt(bih->biCompression, bih->biBitCount); + if (pix_fmt == AV_PIX_FMT_NONE) { + enum AVCodecID codec_id = av_codec_get_id(tags, bih->biCompression); + AVCodec *codec = avcodec_find_decoder(codec_id); + if (codec_id == AV_CODEC_ID_NONE || !codec) { + av_log(avctx, AV_LOG_INFO, " unknown compression type 0x%X", (int) bih->biCompression); + } else { + av_log(avctx, AV_LOG_INFO, " vcodec=%s", codec->name); + } + } else { + av_log(avctx, AV_LOG_INFO, " pixel_format=%s", av_get_pix_fmt_name(pix_fmt)); + } + av_log(avctx, AV_LOG_INFO, " min s=%ldx%ld fps=%g max s=%ldx%ld fps=%g\n", + vcaps->MinOutputSize.cx, 
vcaps->MinOutputSize.cy, + 1e7 / vcaps->MaxFrameInterval, + vcaps->MaxOutputSize.cx, vcaps->MaxOutputSize.cy, + 1e7 / vcaps->MinFrameInterval); + continue; + } + if (ctx->video_codec_id != AV_CODEC_ID_RAWVIDEO) { + if (ctx->video_codec_id != av_codec_get_id(tags, bih->biCompression)) + goto next; + } + if (ctx->pixel_format != AV_PIX_FMT_NONE && + ctx->pixel_format != dshow_pixfmt(bih->biCompression, bih->biBitCount)) { + goto next; + } + if (ctx->framerate) { + int64_t framerate = ((int64_t) ctx->requested_framerate.den*10000000) + / ctx->requested_framerate.num; + if (framerate > vcaps->MaxFrameInterval || + framerate < vcaps->MinFrameInterval) + goto next; + *fr = framerate; + } + if (ctx->requested_width && ctx->requested_height) { + if (ctx->requested_width > vcaps->MaxOutputSize.cx || + ctx->requested_width < vcaps->MinOutputSize.cx || + ctx->requested_height > vcaps->MaxOutputSize.cy || + ctx->requested_height < vcaps->MinOutputSize.cy) + goto next; + bih->biWidth = ctx->requested_width; + bih->biHeight = ctx->requested_height; + } + } else { + AUDIO_STREAM_CONFIG_CAPS *acaps = caps; + WAVEFORMATEX *fx; +#if DSHOWDEBUG + ff_print_AUDIO_STREAM_CONFIG_CAPS(acaps); +#endif + if (IsEqualGUID(&type->formattype, &FORMAT_WaveFormatEx)) { + fx = (void *) type->pbFormat; + } else { + goto next; + } + if (!pformat_set) { + av_log(avctx, AV_LOG_INFO, " min ch=%lu bits=%lu rate=%6lu max ch=%lu bits=%lu rate=%6lu\n", + acaps->MinimumChannels, acaps->MinimumBitsPerSample, acaps->MinimumSampleFrequency, + acaps->MaximumChannels, acaps->MaximumBitsPerSample, acaps->MaximumSampleFrequency); + continue; + } + if (ctx->sample_rate) { + if (ctx->sample_rate > acaps->MaximumSampleFrequency || + ctx->sample_rate < acaps->MinimumSampleFrequency) + goto next; + fx->nSamplesPerSec = ctx->sample_rate; + } + if (ctx->sample_size) { + if (ctx->sample_size > acaps->MaximumBitsPerSample || + ctx->sample_size < acaps->MinimumBitsPerSample) + goto next; + fx->wBitsPerSample = 
ctx->sample_size; + } + if (ctx->channels) { + if (ctx->channels > acaps->MaximumChannels || + ctx->channels < acaps->MinimumChannels) + goto next; + fx->nChannels = ctx->channels; + } + } + if (IAMStreamConfig_SetFormat(config, type) != S_OK) + goto next; + format_set = 1; +next: + if (type->pbFormat) + CoTaskMemFree(type->pbFormat); + CoTaskMemFree(type); + } +end: + IAMStreamConfig_Release(config); + if (caps) + av_free(caps); + if (pformat_set) + *pformat_set = format_set; +} + +/** + * Set audio device buffer size in milliseconds (which can directly impact + * latency, depending on the device). + */ +static int +dshow_set_audio_buffer_size(AVFormatContext *avctx, IPin *pin) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IAMBufferNegotiation *buffer_negotiation = NULL; + ALLOCATOR_PROPERTIES props = { -1, -1, -1, -1 }; + IAMStreamConfig *config = NULL; + AM_MEDIA_TYPE *type = NULL; + int ret = AVERROR(EIO); + + if (IPin_QueryInterface(pin, &IID_IAMStreamConfig, (void **) &config) != S_OK) + goto end; + if (IAMStreamConfig_GetFormat(config, &type) != S_OK) + goto end; + if (!IsEqualGUID(&type->formattype, &FORMAT_WaveFormatEx)) + goto end; + + props.cbBuffer = (((WAVEFORMATEX *) type->pbFormat)->nAvgBytesPerSec) + * ctx->audio_buffer_size / 1000; + + if (IPin_QueryInterface(pin, &IID_IAMBufferNegotiation, (void **) &buffer_negotiation) != S_OK) + goto end; + if (IAMBufferNegotiation_SuggestAllocatorProperties(buffer_negotiation, &props) != S_OK) + goto end; + + ret = 0; + +end: + if (buffer_negotiation) + IAMBufferNegotiation_Release(buffer_negotiation); + if (type) { + if (type->pbFormat) + CoTaskMemFree(type->pbFormat); + CoTaskMemFree(type); + } + if (config) + IAMStreamConfig_Release(config); + + return ret; +} + +/** + * Cycle through available pins using the device_filter device, of type + * devtype, retrieve the first output pin and return the pointer to the + * object found in *ppin. 
+ * If ppin is NULL, cycle through all pins listing audio/video capabilities. + */ +static int +dshow_cycle_pins(AVFormatContext *avctx, enum dshowDeviceType devtype, + IBaseFilter *device_filter, IPin **ppin) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IEnumPins *pins = 0; + IPin *device_pin = NULL; + IPin *pin; + int r; + + const GUID *mediatype[2] = { &MEDIATYPE_Video, &MEDIATYPE_Audio }; + const char *devtypename = (devtype == VideoDevice) ? "video" : "audio"; + + int set_format = (devtype == VideoDevice && (ctx->framerate || + (ctx->requested_width && ctx->requested_height) || + ctx->pixel_format != AV_PIX_FMT_NONE || + ctx->video_codec_id != AV_CODEC_ID_RAWVIDEO)) + || (devtype == AudioDevice && (ctx->channels || ctx->sample_rate)); + int format_set = 0; + + r = IBaseFilter_EnumPins(device_filter, &pins); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not enumerate pins.\n"); + return AVERROR(EIO); + } + + if (!ppin) { + av_log(avctx, AV_LOG_INFO, "DirectShow %s device options\n", + devtypename); + } + while (!device_pin && IEnumPins_Next(pins, 1, &pin, NULL) == S_OK) { + IKsPropertySet *p = NULL; + IEnumMediaTypes *types = NULL; + PIN_INFO info = {0}; + AM_MEDIA_TYPE *type; + GUID category; + DWORD r2; + + IPin_QueryPinInfo(pin, &info); + IBaseFilter_Release(info.pFilter); + + if (info.dir != PINDIR_OUTPUT) + goto next; + if (IPin_QueryInterface(pin, &IID_IKsPropertySet, (void **) &p) != S_OK) + goto next; + if (IKsPropertySet_Get(p, &ROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, + NULL, 0, &category, sizeof(GUID), &r2) != S_OK) + goto next; + if (!IsEqualGUID(&category, &PIN_CATEGORY_CAPTURE)) + goto next; + + if (!ppin) { + char *buf = dup_wchar_to_utf8(info.achName); + av_log(avctx, AV_LOG_INFO, " Pin \"%s\"\n", buf); + av_free(buf); + dshow_cycle_formats(avctx, devtype, pin, NULL); + goto next; + } + if (set_format) { + dshow_cycle_formats(avctx, devtype, pin, &format_set); + if (!format_set) { + goto next; + } + } + if (devtype == AudioDevice && 
ctx->audio_buffer_size) { + if (dshow_set_audio_buffer_size(avctx, pin) < 0) { + av_log(avctx, AV_LOG_ERROR, "unable to set audio buffer size %d to pin, using pin anyway...", ctx->audio_buffer_size); + } + } + + if (IPin_EnumMediaTypes(pin, &types) != S_OK) + goto next; + + IEnumMediaTypes_Reset(types); + while (!device_pin && IEnumMediaTypes_Next(types, 1, &type, NULL) == S_OK) { + if (IsEqualGUID(&type->majortype, mediatype[devtype])) { + device_pin = pin; + goto next; + } + CoTaskMemFree(type); + } + +next: + if (types) + IEnumMediaTypes_Release(types); + if (p) + IKsPropertySet_Release(p); + if (device_pin != pin) + IPin_Release(pin); + } + + IEnumPins_Release(pins); + + if (ppin) { + if (set_format && !format_set) { + av_log(avctx, AV_LOG_ERROR, "Could not set %s options\n", devtypename); + return AVERROR(EIO); + } + if (!device_pin) { + av_log(avctx, AV_LOG_ERROR, + "Could not find output pin from %s capture device.\n", devtypename); + return AVERROR(EIO); + } + *ppin = device_pin; + } + + return 0; +} + +/** + * List options for device with type devtype. 
+ * + * @param devenum device enumerator used for accessing the device + */ +static int +dshow_list_device_options(AVFormatContext *avctx, ICreateDevEnum *devenum, + enum dshowDeviceType devtype) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IBaseFilter *device_filter = NULL; + int r; + + if ((r = dshow_cycle_devices(avctx, devenum, devtype, &device_filter)) < 0) + return r; + ctx->device_filter[devtype] = device_filter; + if ((r = dshow_cycle_pins(avctx, devtype, device_filter, NULL)) < 0) + return r; + + return 0; +} + +static int +dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum, + enum dshowDeviceType devtype) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IBaseFilter *device_filter = NULL; + IGraphBuilder *graph = ctx->graph; + IPin *device_pin = NULL; + libAVPin *capture_pin = NULL; + libAVFilter *capture_filter = NULL; + int ret = AVERROR(EIO); + int r; + + const wchar_t *filter_name[2] = { L"Audio capture filter", L"Video capture filter" }; + + if ((r = dshow_cycle_devices(avctx, devenum, devtype, &device_filter)) < 0) { + ret = r; + goto error; + } + + ctx->device_filter [devtype] = device_filter; + + r = IGraphBuilder_AddFilter(graph, device_filter, NULL); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not add device filter to graph.\n"); + goto error; + } + + if ((r = dshow_cycle_pins(avctx, devtype, device_filter, &device_pin)) < 0) { + ret = r; + goto error; + } + ctx->device_pin[devtype] = device_pin; + + capture_filter = libAVFilter_Create(avctx, callback, devtype); + if (!capture_filter) { + av_log(avctx, AV_LOG_ERROR, "Could not create grabber filter.\n"); + goto error; + } + ctx->capture_filter[devtype] = capture_filter; + + r = IGraphBuilder_AddFilter(graph, (IBaseFilter *) capture_filter, + filter_name[devtype]); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not add capture filter to graph\n"); + goto error; + } + + libAVPin_AddRef(capture_filter->pin); + capture_pin = capture_filter->pin; + 
ctx->capture_pin[devtype] = capture_pin; + + r = IGraphBuilder_ConnectDirect(graph, device_pin, (IPin *) capture_pin, NULL); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not connect pins\n"); + goto error; + } + + ret = 0; + +error: + return ret; +} + +static enum AVCodecID waveform_codec_id(enum AVSampleFormat sample_fmt) +{ + switch (sample_fmt) { + case AV_SAMPLE_FMT_U8: return AV_CODEC_ID_PCM_U8; + case AV_SAMPLE_FMT_S16: return AV_CODEC_ID_PCM_S16LE; + case AV_SAMPLE_FMT_S32: return AV_CODEC_ID_PCM_S32LE; + default: return AV_CODEC_ID_NONE; /* Should never happen. */ + } +} + +static enum AVSampleFormat sample_fmt_bits_per_sample(int bits) +{ + switch (bits) { + case 8: return AV_SAMPLE_FMT_U8; + case 16: return AV_SAMPLE_FMT_S16; + case 32: return AV_SAMPLE_FMT_S32; + default: return AV_SAMPLE_FMT_NONE; /* Should never happen. */ + } +} + +static int +dshow_add_device(AVFormatContext *avctx, + enum dshowDeviceType devtype) +{ + struct dshow_ctx *ctx = avctx->priv_data; + AM_MEDIA_TYPE type; + AVCodecContext *codec; + AVStream *st; + int ret = AVERROR(EIO); + + st = avformat_new_stream(avctx, NULL); + if (!st) { + ret = AVERROR(ENOMEM); + goto error; + } + st->id = devtype; + + ctx->capture_filter[devtype]->stream_index = st->index; + + libAVPin_ConnectionMediaType(ctx->capture_pin[devtype], &type); + + codec = st->codec; + if (devtype == VideoDevice) { + BITMAPINFOHEADER *bih = NULL; + AVRational time_base; + + if (IsEqualGUID(&type.formattype, &FORMAT_VideoInfo)) { + VIDEOINFOHEADER *v = (void *) type.pbFormat; + time_base = (AVRational) { v->AvgTimePerFrame, 10000000 }; + bih = &v->bmiHeader; + } else if (IsEqualGUID(&type.formattype, &FORMAT_VideoInfo2)) { + VIDEOINFOHEADER2 *v = (void *) type.pbFormat; + time_base = (AVRational) { v->AvgTimePerFrame, 10000000 }; + bih = &v->bmiHeader; + } + if (!bih) { + av_log(avctx, AV_LOG_ERROR, "Could not get media type.\n"); + goto error; + } + + codec->time_base = time_base; + codec->codec_type = 
AVMEDIA_TYPE_VIDEO; + codec->width = bih->biWidth; + codec->height = bih->biHeight; + codec->codec_tag = bih->biCompression; + codec->pix_fmt = dshow_pixfmt(bih->biCompression, bih->biBitCount); + if (bih->biCompression == MKTAG('H', 'D', 'Y', 'C')) { + av_log(avctx, AV_LOG_DEBUG, "attempt to use full range for HDYC...\n"); + codec->color_range = AVCOL_RANGE_MPEG; // just in case it needs this... + } + if (codec->pix_fmt == AV_PIX_FMT_NONE) { + const AVCodecTag *const tags[] = { avformat_get_riff_video_tags(), NULL }; + codec->codec_id = av_codec_get_id(tags, bih->biCompression); + if (codec->codec_id == AV_CODEC_ID_NONE) { + av_log(avctx, AV_LOG_ERROR, "Unknown compression type. " + "Please report type 0x%X.\n", (int) bih->biCompression); + return AVERROR_PATCHWELCOME; + } + codec->bits_per_coded_sample = bih->biBitCount; + } else { + codec->codec_id = AV_CODEC_ID_RAWVIDEO; + if (bih->biCompression == BI_RGB || bih->biCompression == BI_BITFIELDS) { + codec->bits_per_coded_sample = bih->biBitCount; + codec->extradata = av_malloc(9 + FF_INPUT_BUFFER_PADDING_SIZE); + if (codec->extradata) { + codec->extradata_size = 9; + memcpy(codec->extradata, "BottomUp", 9); + } + } + } + } else { + WAVEFORMATEX *fx = NULL; + + if (IsEqualGUID(&type.formattype, &FORMAT_WaveFormatEx)) { + fx = (void *) type.pbFormat; + } + if (!fx) { + av_log(avctx, AV_LOG_ERROR, "Could not get media type.\n"); + goto error; + } + + codec->codec_type = AVMEDIA_TYPE_AUDIO; + codec->sample_fmt = sample_fmt_bits_per_sample(fx->wBitsPerSample); + codec->codec_id = waveform_codec_id(codec->sample_fmt); + codec->sample_rate = fx->nSamplesPerSec; + codec->channels = fx->nChannels; + } + + avpriv_set_pts_info(st, 64, 1, 10000000); + + ret = 0; + +error: + return ret; +} + +static int parse_device_name(AVFormatContext *avctx) +{ + struct dshow_ctx *ctx = avctx->priv_data; + char **device_name = ctx->device_name; + char *name = av_strdup(avctx->filename); + char *tmp = name; + int ret = 1; + char *type; + + 
while ((type = strtok(tmp, "="))) { + char *token = strtok(NULL, ":"); + tmp = NULL; + + if (!strcmp(type, "video")) { + device_name[0] = token; + } else if (!strcmp(type, "audio")) { + device_name[1] = token; + } else { + device_name[0] = NULL; + device_name[1] = NULL; + break; + } + } + + if (!device_name[0] && !device_name[1]) { + ret = 0; + } else { + if (device_name[0]) + device_name[0] = av_strdup(device_name[0]); + if (device_name[1]) + device_name[1] = av_strdup(device_name[1]); + } + + av_free(name); + return ret; +} + +static int dshow_read_header(AVFormatContext *avctx) +{ + struct dshow_ctx *ctx = avctx->priv_data; + IGraphBuilder *graph = NULL; + ICreateDevEnum *devenum = NULL; + IMediaControl *control = NULL; + IMediaEvent *media_event = NULL; + HANDLE media_event_handle; + HANDLE proc; + int ret = AVERROR(EIO); + int r; + + CoInitialize(0); + + if (!ctx->list_devices && !parse_device_name(avctx)) { + av_log(avctx, AV_LOG_ERROR, "Malformed dshow input string.\n"); + goto error; + } + + ctx->video_codec_id = avctx->video_codec_id ? 
avctx->video_codec_id + : AV_CODEC_ID_RAWVIDEO; + if (ctx->pixel_format != AV_PIX_FMT_NONE) { + if (ctx->video_codec_id != AV_CODEC_ID_RAWVIDEO) { + av_log(avctx, AV_LOG_ERROR, "Pixel format may only be set when " + "video codec is not set or set to rawvideo\n"); + ret = AVERROR(EINVAL); + goto error; + } + } + if (ctx->framerate) { + r = av_parse_video_rate(&ctx->requested_framerate, ctx->framerate); + if (r < 0) { + av_log(avctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", ctx->framerate); + goto error; + } + } + + r = CoCreateInstance(&CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, + &IID_IGraphBuilder, (void **) &graph); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not create capture graph.\n"); + goto error; + } + ctx->graph = graph; + + r = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, + &IID_ICreateDevEnum, (void **) &devenum); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not enumerate system devices.\n"); + goto error; + } + + if (ctx->list_devices) { + av_log(avctx, AV_LOG_INFO, "DirectShow video devices\n"); + dshow_cycle_devices(avctx, devenum, VideoDevice, NULL); + av_log(avctx, AV_LOG_INFO, "DirectShow audio devices\n"); + dshow_cycle_devices(avctx, devenum, AudioDevice, NULL); + ret = AVERROR_EXIT; + goto error; + } + if (ctx->list_options) { + if (ctx->device_name[VideoDevice]) + dshow_list_device_options(avctx, devenum, VideoDevice); + if (ctx->device_name[AudioDevice]) + dshow_list_device_options(avctx, devenum, AudioDevice); + ret = AVERROR_EXIT; + goto error; + } + + if (ctx->device_name[VideoDevice]) { + if ((r = dshow_open_device(avctx, devenum, VideoDevice)) < 0 || + (r = dshow_add_device(avctx, VideoDevice)) < 0) { + ret = r; + goto error; + } + } + if (ctx->device_name[AudioDevice]) { + if ((r = dshow_open_device(avctx, devenum, AudioDevice)) < 0 || + (r = dshow_add_device(avctx, AudioDevice)) < 0) { + ret = r; + goto error; + } + } + ctx->curbufsize[0] = 0; + ctx->curbufsize[1] = 0; + 
ctx->mutex = CreateMutex(NULL, 0, NULL); + if (!ctx->mutex) { + av_log(avctx, AV_LOG_ERROR, "Could not create Mutex\n"); + goto error; + } + ctx->event[1] = CreateEvent(NULL, 1, 0, NULL); + if (!ctx->event[1]) { + av_log(avctx, AV_LOG_ERROR, "Could not create Event\n"); + goto error; + } + + r = IGraphBuilder_QueryInterface(graph, &IID_IMediaControl, (void **) &control); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get media control.\n"); + goto error; + } + ctx->control = control; + + r = IGraphBuilder_QueryInterface(graph, &IID_IMediaEvent, (void **) &media_event); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get media event.\n"); + goto error; + } + ctx->media_event = media_event; + + r = IMediaEvent_GetEventHandle(media_event, (void *) &media_event_handle); + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not get media event handle.\n"); + goto error; + } + proc = GetCurrentProcess(); + r = DuplicateHandle(proc, media_event_handle, proc, &ctx->event[0], + 0, 0, DUPLICATE_SAME_ACCESS); + if (!r) { + av_log(avctx, AV_LOG_ERROR, "Could not duplicate media event handle.\n"); + goto error; + } + + r = IMediaControl_Run(control); + if (r == S_FALSE) { + OAFilterState pfs; + r = IMediaControl_GetState(control, 0, &pfs); + } + if (r != S_OK) { + av_log(avctx, AV_LOG_ERROR, "Could not run filter\n"); + goto error; + } + + ret = 0; + +error: + + if (devenum) + ICreateDevEnum_Release(devenum); + + if (ret < 0) + dshow_read_close(avctx); + + return ret; +} + +/** + * Checks media events from DirectShow and returns -1 on error or EOF. Also + * purges all events that might be in the event queue to stop the trigger + * of event notification. 
+ */ +static int dshow_check_event_queue(IMediaEvent *media_event) +{ + LONG_PTR p1, p2; + long code; + int ret = 0; + + while (IMediaEvent_GetEvent(media_event, &code, &p1, &p2, 0) != E_ABORT) { + if (code == EC_COMPLETE || code == EC_DEVICE_LOST || code == EC_ERRORABORT) + ret = -1; + IMediaEvent_FreeEventParams(media_event, code, p1, p2); + } + + return ret; +} + +static int dshow_read_packet(AVFormatContext *s, AVPacket *pkt) +{ + struct dshow_ctx *ctx = s->priv_data; + AVPacketList *pktl = NULL; + + while (!ctx->eof && !pktl) { + WaitForSingleObject(ctx->mutex, INFINITE); + pktl = ctx->pktl; + if (pktl) { + *pkt = pktl->pkt; + ctx->pktl = ctx->pktl->next; + av_free(pktl); + ctx->curbufsize[pkt->stream_index] -= pkt->size; + } + ResetEvent(ctx->event[1]); + ReleaseMutex(ctx->mutex); + if (!pktl) { + if (dshow_check_event_queue(ctx->media_event) < 0) { + ctx->eof = 1; + } else if (s->flags & AVFMT_FLAG_NONBLOCK) { + return AVERROR(EAGAIN); + } else { + WaitForMultipleObjects(2, ctx->event, 0, INFINITE); + } + } + } + + return ctx->eof ? 
AVERROR(EIO) : pkt->size; +} + +#define OFFSET(x) offsetof(struct dshow_ctx, x) +#define DEC AV_OPT_FLAG_DECODING_PARAM +static const AVOption options[] = { + { "video_size", "set video size given a string such as 640x480 or hd720.", OFFSET(requested_width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC }, + { "pixel_format", "set video pixel format", OFFSET(pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_NONE}, -1, INT_MAX, DEC }, + { "framerate", "set video frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, + { "sample_rate", "set audio sample rate", OFFSET(sample_rate), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, DEC }, + { "sample_size", "set audio sample size", OFFSET(sample_size), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 16, DEC }, + { "channels", "set number of audio channels, such as 1 or 2", OFFSET(channels), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, DEC }, + { "list_devices", "list available devices", OFFSET(list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, DEC, "list_devices" }, + { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, DEC, "list_devices" }, + { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, DEC, "list_devices" }, + { "list_options", "list available options for specified device", OFFSET(list_options), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, DEC, "list_options" }, + { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, DEC, "list_options" }, + { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, DEC, "list_options" }, + { "video_device_number", "set video device number for devices with same name (starts at 0)", OFFSET(video_device_number), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, DEC }, + { "audio_device_number", "set audio device number for devices with same name (starts at 0)", OFFSET(audio_device_number), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, DEC }, + { "audio_buffer_size", "set audio device buffer latency size in milliseconds (default is the device's default)", OFFSET(audio_buffer_size), AV_OPT_TYPE_INT, {.i64 = 
0}, 0, INT_MAX, DEC }, + { NULL }, +}; + +static const AVClass dshow_class = { + .class_name = "dshow indev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, +}; + +AVInputFormat ff_dshow_demuxer = { + .name = "dshow", + .long_name = NULL_IF_CONFIG_SMALL("DirectShow capture"), + .priv_data_size = sizeof(struct dshow_ctx), + .read_header = dshow_read_header, + .read_packet = dshow_read_packet, + .read_close = dshow_read_close, + .flags = AVFMT_NOFILE, + .priv_class = &dshow_class, +}; diff --git a/libavdevice/dshow_capture.h b/libavdevice/dshow_capture.h new file mode 100644 index 0000000..e4b4dce --- /dev/null +++ b/libavdevice/dshow_capture.h @@ -0,0 +1,279 @@ +/* + * DirectShow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVDEVICE_DSHOW_H +#define AVDEVICE_DSHOW_H + +#define DSHOWDEBUG 0 + +#include "avdevice.h" + +#define COBJMACROS +#include <windows.h> +#define NO_DSHOW_STRSAFE +#include <dshow.h> +#include <dvdmedia.h> + +/* EC_DEVICE_LOST is not defined in MinGW dshow headers. 
*/ +#ifndef EC_DEVICE_LOST +#define EC_DEVICE_LOST 0x1f +#endif + +long ff_copy_dshow_media_type(AM_MEDIA_TYPE *dst, const AM_MEDIA_TYPE *src); +void ff_print_VIDEO_STREAM_CONFIG_CAPS(const VIDEO_STREAM_CONFIG_CAPS *caps); +void ff_print_AUDIO_STREAM_CONFIG_CAPS(const AUDIO_STREAM_CONFIG_CAPS *caps); +void ff_print_AM_MEDIA_TYPE(const AM_MEDIA_TYPE *type); +void ff_printGUID(const GUID *g); + +#if DSHOWDEBUG +extern const AVClass *ff_dshow_context_class_ptr; +#define dshowdebug(...) av_log(&ff_dshow_context_class_ptr, AV_LOG_DEBUG, __VA_ARGS__) +#else +#define dshowdebug(...) +#endif + +static inline void nothing(void *foo) +{ +} + +struct GUIDoffset { + const GUID *iid; + int offset; +}; + +enum dshowDeviceType { + VideoDevice = 0, + AudioDevice = 1, +}; + +#define DECLARE_QUERYINTERFACE(class, ...) \ +long WINAPI \ +class##_QueryInterface(class *this, const GUID *riid, void **ppvObject) \ +{ \ + struct GUIDoffset ifaces[] = __VA_ARGS__; \ + int i; \ + dshowdebug(AV_STRINGIFY(class)"_QueryInterface(%p, %p, %p)\n", this, riid, ppvObject); \ + ff_printGUID(riid); \ + if (!ppvObject) \ + return E_POINTER; \ + for (i = 0; i < sizeof(ifaces)/sizeof(ifaces[0]); i++) { \ + if (IsEqualGUID(riid, ifaces[i].iid)) { \ + void *obj = (void *) ((uint8_t *) this + ifaces[i].offset); \ + class##_AddRef(this); \ + dshowdebug("\tfound %d with offset %d\n", i, ifaces[i].offset); \ + *ppvObject = (void *) obj; \ + return S_OK; \ + } \ + } \ + dshowdebug("\tE_NOINTERFACE\n"); \ + *ppvObject = NULL; \ + return E_NOINTERFACE; \ +} +#define DECLARE_ADDREF(class) \ +unsigned long WINAPI \ +class##_AddRef(class *this) \ +{ \ + dshowdebug(AV_STRINGIFY(class)"_AddRef(%p)\t%ld\n", this, this->ref+1); \ + return InterlockedIncrement(&this->ref); \ +} +#define DECLARE_RELEASE(class) \ +unsigned long WINAPI \ +class##_Release(class *this) \ +{ \ + long ref = InterlockedDecrement(&this->ref); \ + dshowdebug(AV_STRINGIFY(class)"_Release(%p)\t%ld\n", this, ref); \ + if (!ref) \ + 
class##_Destroy(this); \ + return ref; \ +} + +#define DECLARE_DESTROY(class, func) \ +void class##_Destroy(class *this) \ +{ \ + dshowdebug(AV_STRINGIFY(class)"_Destroy(%p)\n", this); \ + func(this); \ + if (this) { \ + if (this->vtbl) \ + CoTaskMemFree(this->vtbl); \ + CoTaskMemFree(this); \ + } \ +} +#define DECLARE_CREATE(class, setup, ...) \ +class *class##_Create(__VA_ARGS__) \ +{ \ + class *this = CoTaskMemAlloc(sizeof(class)); \ + void *vtbl = CoTaskMemAlloc(sizeof(*this->vtbl)); \ + dshowdebug(AV_STRINGIFY(class)"_Create(%p)\n", this); \ + if (!this || !vtbl) \ + goto fail; \ + ZeroMemory(this, sizeof(class)); \ + ZeroMemory(vtbl, sizeof(*this->vtbl)); \ + this->ref = 1; \ + this->vtbl = vtbl; \ + if (!setup) \ + goto fail; \ + dshowdebug("created "AV_STRINGIFY(class)" %p\n", this); \ + return this; \ +fail: \ + class##_Destroy(this); \ + dshowdebug("could not create "AV_STRINGIFY(class)"\n"); \ + return NULL; \ +} + +#define SETVTBL(vtbl, class, fn) \ + do { (vtbl)->fn = (void *) class##_##fn; } while(0) + +/***************************************************************************** + * Forward Declarations + ****************************************************************************/ +typedef struct libAVPin libAVPin; +typedef struct libAVMemInputPin libAVMemInputPin; +typedef struct libAVEnumPins libAVEnumPins; +typedef struct libAVEnumMediaTypes libAVEnumMediaTypes; +typedef struct libAVFilter libAVFilter; + +/***************************************************************************** + * libAVPin + ****************************************************************************/ +struct libAVPin { + IPinVtbl *vtbl; + long ref; + libAVFilter *filter; + IPin *connectedto; + AM_MEDIA_TYPE type; + IMemInputPinVtbl *imemvtbl; +}; + +long WINAPI libAVPin_QueryInterface (libAVPin *, const GUID *, void **); +unsigned long WINAPI libAVPin_AddRef (libAVPin *); +unsigned long WINAPI libAVPin_Release (libAVPin *); +long WINAPI libAVPin_Connect (libAVPin *, IPin 
*, const AM_MEDIA_TYPE *); +long WINAPI libAVPin_ReceiveConnection (libAVPin *, IPin *, const AM_MEDIA_TYPE *); +long WINAPI libAVPin_Disconnect (libAVPin *); +long WINAPI libAVPin_ConnectedTo (libAVPin *, IPin **); +long WINAPI libAVPin_ConnectionMediaType (libAVPin *, AM_MEDIA_TYPE *); +long WINAPI libAVPin_QueryPinInfo (libAVPin *, PIN_INFO *); +long WINAPI libAVPin_QueryDirection (libAVPin *, PIN_DIRECTION *); +long WINAPI libAVPin_QueryId (libAVPin *, wchar_t **); +long WINAPI libAVPin_QueryAccept (libAVPin *, const AM_MEDIA_TYPE *); +long WINAPI libAVPin_EnumMediaTypes (libAVPin *, IEnumMediaTypes **); +long WINAPI libAVPin_QueryInternalConnections(libAVPin *, IPin **, unsigned long *); +long WINAPI libAVPin_EndOfStream (libAVPin *); +long WINAPI libAVPin_BeginFlush (libAVPin *); +long WINAPI libAVPin_EndFlush (libAVPin *); +long WINAPI libAVPin_NewSegment (libAVPin *, REFERENCE_TIME, REFERENCE_TIME, double); + +long WINAPI libAVMemInputPin_QueryInterface (libAVMemInputPin *, const GUID *, void **); +unsigned long WINAPI libAVMemInputPin_AddRef (libAVMemInputPin *); +unsigned long WINAPI libAVMemInputPin_Release (libAVMemInputPin *); +long WINAPI libAVMemInputPin_GetAllocator (libAVMemInputPin *, IMemAllocator **); +long WINAPI libAVMemInputPin_NotifyAllocator (libAVMemInputPin *, IMemAllocator *, BOOL); +long WINAPI libAVMemInputPin_GetAllocatorRequirements(libAVMemInputPin *, ALLOCATOR_PROPERTIES *); +long WINAPI libAVMemInputPin_Receive (libAVMemInputPin *, IMediaSample *); +long WINAPI libAVMemInputPin_ReceiveMultiple (libAVMemInputPin *, IMediaSample **, long, long *); +long WINAPI libAVMemInputPin_ReceiveCanBlock (libAVMemInputPin *); + +void libAVPin_Destroy(libAVPin *); +libAVPin *libAVPin_Create (libAVFilter *filter); + +void libAVMemInputPin_Destroy(libAVMemInputPin *); + +/***************************************************************************** + * libAVEnumPins + ****************************************************************************/ 
+struct libAVEnumPins { + IEnumPinsVtbl *vtbl; + long ref; + int pos; + libAVPin *pin; + libAVFilter *filter; +}; + +long WINAPI libAVEnumPins_QueryInterface(libAVEnumPins *, const GUID *, void **); +unsigned long WINAPI libAVEnumPins_AddRef (libAVEnumPins *); +unsigned long WINAPI libAVEnumPins_Release (libAVEnumPins *); +long WINAPI libAVEnumPins_Next (libAVEnumPins *, unsigned long, IPin **, unsigned long *); +long WINAPI libAVEnumPins_Skip (libAVEnumPins *, unsigned long); +long WINAPI libAVEnumPins_Reset (libAVEnumPins *); +long WINAPI libAVEnumPins_Clone (libAVEnumPins *, libAVEnumPins **); + +void libAVEnumPins_Destroy(libAVEnumPins *); +libAVEnumPins *libAVEnumPins_Create (libAVPin *pin, libAVFilter *filter); + +/***************************************************************************** + * libAVEnumMediaTypes + ****************************************************************************/ +struct libAVEnumMediaTypes { + IEnumPinsVtbl *vtbl; + long ref; + int pos; + AM_MEDIA_TYPE type; +}; + +long WINAPI libAVEnumMediaTypes_QueryInterface(libAVEnumMediaTypes *, const GUID *, void **); +unsigned long WINAPI libAVEnumMediaTypes_AddRef (libAVEnumMediaTypes *); +unsigned long WINAPI libAVEnumMediaTypes_Release (libAVEnumMediaTypes *); +long WINAPI libAVEnumMediaTypes_Next (libAVEnumMediaTypes *, unsigned long, AM_MEDIA_TYPE **, unsigned long *); +long WINAPI libAVEnumMediaTypes_Skip (libAVEnumMediaTypes *, unsigned long); +long WINAPI libAVEnumMediaTypes_Reset (libAVEnumMediaTypes *); +long WINAPI libAVEnumMediaTypes_Clone (libAVEnumMediaTypes *, libAVEnumMediaTypes **); + +void libAVEnumMediaTypes_Destroy(libAVEnumMediaTypes *); +libAVEnumMediaTypes *libAVEnumMediaTypes_Create(const AM_MEDIA_TYPE *type); + +/***************************************************************************** + * libAVFilter + ****************************************************************************/ +struct libAVFilter { + IBaseFilterVtbl *vtbl; + long ref; + const wchar_t 
*name; + libAVPin *pin; + FILTER_INFO info; + FILTER_STATE state; + IReferenceClock *clock; + enum dshowDeviceType type; + void *priv_data; + int stream_index; + int64_t start_time; + void (*callback)(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, enum dshowDeviceType type); +}; + +long WINAPI libAVFilter_QueryInterface (libAVFilter *, const GUID *, void **); +unsigned long WINAPI libAVFilter_AddRef (libAVFilter *); +unsigned long WINAPI libAVFilter_Release (libAVFilter *); +long WINAPI libAVFilter_GetClassID (libAVFilter *, CLSID *); +long WINAPI libAVFilter_Stop (libAVFilter *); +long WINAPI libAVFilter_Pause (libAVFilter *); +long WINAPI libAVFilter_Run (libAVFilter *, REFERENCE_TIME); +long WINAPI libAVFilter_GetState (libAVFilter *, DWORD, FILTER_STATE *); +long WINAPI libAVFilter_SetSyncSource (libAVFilter *, IReferenceClock *); +long WINAPI libAVFilter_GetSyncSource (libAVFilter *, IReferenceClock **); +long WINAPI libAVFilter_EnumPins (libAVFilter *, IEnumPins **); +long WINAPI libAVFilter_FindPin (libAVFilter *, const wchar_t *, IPin **); +long WINAPI libAVFilter_QueryFilterInfo(libAVFilter *, FILTER_INFO *); +long WINAPI libAVFilter_JoinFilterGraph(libAVFilter *, IFilterGraph *, const wchar_t *); +long WINAPI libAVFilter_QueryVendorInfo(libAVFilter *, wchar_t **); + +void libAVFilter_Destroy(libAVFilter *); +libAVFilter *libAVFilter_Create (void *, void *, enum dshowDeviceType); + +#endif /* AVDEVICE_DSHOW_H */ diff --git a/libavdevice/dshow_common.c b/libavdevice/dshow_common.c new file mode 100644 index 0000000..f7f0dfb --- /dev/null +++ b/libavdevice/dshow_common.c @@ -0,0 +1,190 @@ +/* + * Directshow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "dshow_capture.h" + +long ff_copy_dshow_media_type(AM_MEDIA_TYPE *dst, const AM_MEDIA_TYPE *src) +{ + uint8_t *pbFormat = NULL; + + if (src->cbFormat) { + pbFormat = CoTaskMemAlloc(src->cbFormat); + if (!pbFormat) + return E_OUTOFMEMORY; + memcpy(pbFormat, src->pbFormat, src->cbFormat); + } + + *dst = *src; + dst->pUnk = NULL; + dst->pbFormat = pbFormat; + + return S_OK; +} + +void ff_printGUID(const GUID *g) +{ +#if DSHOWDEBUG + const uint32_t *d = (const uint32_t *) &g->Data1; + const uint16_t *w = (const uint16_t *) &g->Data2; + const uint8_t *c = (const uint8_t *) &g->Data4; + + dshowdebug("0x%08x 0x%04x 0x%04x %02x%02x%02x%02x%02x%02x%02x%02x", + d[0], w[0], w[1], + c[0], c[1], c[2], c[3], c[4], c[5], c[6], c[7]); +#endif +} + +static const char *dshow_context_to_name(void *ptr) +{ + return "dshow"; +} +static const AVClass ff_dshow_context_class = { "DirectShow", dshow_context_to_name }; +const AVClass *ff_dshow_context_class_ptr = &ff_dshow_context_class; + +#define dstruct(pctx, sname, var, type) \ + dshowdebug(" "#var":\t%"type"\n", sname->var) + +#if DSHOWDEBUG +static void dump_bih(void *s, BITMAPINFOHEADER *bih) +{ + dshowdebug(" BITMAPINFOHEADER\n"); + dstruct(s, bih, biSize, "lu"); + dstruct(s, bih, biWidth, 
"ld"); + dstruct(s, bih, biHeight, "ld"); + dstruct(s, bih, biPlanes, "d"); + dstruct(s, bih, biBitCount, "d"); + dstruct(s, bih, biCompression, "lu"); + dshowdebug(" biCompression:\t\"%.4s\"\n", + (char*) &bih->biCompression); + dstruct(s, bih, biSizeImage, "lu"); + dstruct(s, bih, biXPelsPerMeter, "lu"); + dstruct(s, bih, biYPelsPerMeter, "lu"); + dstruct(s, bih, biClrUsed, "lu"); + dstruct(s, bih, biClrImportant, "lu"); +} +#endif + +void ff_print_VIDEO_STREAM_CONFIG_CAPS(const VIDEO_STREAM_CONFIG_CAPS *caps) +{ +#if DSHOWDEBUG + dshowdebug(" VIDEO_STREAM_CONFIG_CAPS\n"); + dshowdebug(" guid\t"); + ff_printGUID(&caps->guid); + dshowdebug("\n"); + dshowdebug(" VideoStandard\t%lu\n", caps->VideoStandard); + dshowdebug(" InputSize %ld\t%ld\n", caps->InputSize.cx, caps->InputSize.cy); + dshowdebug(" MinCroppingSize %ld\t%ld\n", caps->MinCroppingSize.cx, caps->MinCroppingSize.cy); + dshowdebug(" MaxCroppingSize %ld\t%ld\n", caps->MaxCroppingSize.cx, caps->MaxCroppingSize.cy); + dshowdebug(" CropGranularityX\t%d\n", caps->CropGranularityX); + dshowdebug(" CropGranularityY\t%d\n", caps->CropGranularityY); + dshowdebug(" CropAlignX\t%d\n", caps->CropAlignX); + dshowdebug(" CropAlignY\t%d\n", caps->CropAlignY); + dshowdebug(" MinOutputSize %ld\t%ld\n", caps->MinOutputSize.cx, caps->MinOutputSize.cy); + dshowdebug(" MaxOutputSize %ld\t%ld\n", caps->MaxOutputSize.cx, caps->MaxOutputSize.cy); + dshowdebug(" OutputGranularityX\t%d\n", caps->OutputGranularityX); + dshowdebug(" OutputGranularityY\t%d\n", caps->OutputGranularityY); + dshowdebug(" StretchTapsX\t%d\n", caps->StretchTapsX); + dshowdebug(" StretchTapsY\t%d\n", caps->StretchTapsY); + dshowdebug(" ShrinkTapsX\t%d\n", caps->ShrinkTapsX); + dshowdebug(" ShrinkTapsY\t%d\n", caps->ShrinkTapsY); + dshowdebug(" MinFrameInterval\t%"PRId64"\n", caps->MinFrameInterval); + dshowdebug(" MaxFrameInterval\t%"PRId64"\n", caps->MaxFrameInterval); + dshowdebug(" MinBitsPerSecond\t%ld\n", caps->MinBitsPerSecond); + dshowdebug(" 
MaxBitsPerSecond\t%ld\n", caps->MaxBitsPerSecond); +#endif +} + +void ff_print_AUDIO_STREAM_CONFIG_CAPS(const AUDIO_STREAM_CONFIG_CAPS *caps) +{ +#if DSHOWDEBUG + dshowdebug(" AUDIO_STREAM_CONFIG_CAPS\n"); + dshowdebug(" guid\t"); + ff_printGUID(&caps->guid); + dshowdebug("\n"); + dshowdebug(" MinimumChannels\t%lu\n", caps->MinimumChannels); + dshowdebug(" MaximumChannels\t%lu\n", caps->MaximumChannels); + dshowdebug(" ChannelsGranularity\t%lu\n", caps->ChannelsGranularity); + dshowdebug(" MinimumBitsPerSample\t%lu\n", caps->MinimumBitsPerSample); + dshowdebug(" MaximumBitsPerSample\t%lu\n", caps->MaximumBitsPerSample); + dshowdebug(" BitsPerSampleGranularity\t%lu\n", caps->BitsPerSampleGranularity); + dshowdebug(" MinimumSampleFrequency\t%lu\n", caps->MinimumSampleFrequency); + dshowdebug(" MaximumSampleFrequency\t%lu\n", caps->MaximumSampleFrequency); + dshowdebug(" SampleFrequencyGranularity\t%lu\n", caps->SampleFrequencyGranularity); +#endif +} + +void ff_print_AM_MEDIA_TYPE(const AM_MEDIA_TYPE *type) +{ +#if DSHOWDEBUG + dshowdebug(" majortype\t"); + ff_printGUID(&type->majortype); + dshowdebug("\n"); + dshowdebug(" subtype\t"); + ff_printGUID(&type->subtype); + dshowdebug("\n"); + dshowdebug(" bFixedSizeSamples\t%d\n", type->bFixedSizeSamples); + dshowdebug(" bTemporalCompression\t%d\n", type->bTemporalCompression); + dshowdebug(" lSampleSize\t%lu\n", type->lSampleSize); + dshowdebug(" formattype\t"); + ff_printGUID(&type->formattype); + dshowdebug("\n"); + dshowdebug(" pUnk\t%p\n", type->pUnk); + dshowdebug(" cbFormat\t%lu\n", type->cbFormat); + dshowdebug(" pbFormat\t%p\n", type->pbFormat); + + if (IsEqualGUID(&type->formattype, &FORMAT_VideoInfo)) { + VIDEOINFOHEADER *v = (void *) type->pbFormat; + dshowdebug(" rcSource: left %ld top %ld right %ld bottom %ld\n", + v->rcSource.left, v->rcSource.top, v->rcSource.right, v->rcSource.bottom); + dshowdebug(" rcTarget: left %ld top %ld right %ld bottom %ld\n", + v->rcTarget.left, v->rcTarget.top, 
v->rcTarget.right, v->rcTarget.bottom); + dshowdebug(" dwBitRate: %lu\n", v->dwBitRate); + dshowdebug(" dwBitErrorRate: %lu\n", v->dwBitErrorRate); + dshowdebug(" AvgTimePerFrame: %"PRId64"\n", v->AvgTimePerFrame); + dump_bih(NULL, &v->bmiHeader); + } else if (IsEqualGUID(&type->formattype, &FORMAT_VideoInfo2)) { + VIDEOINFOHEADER2 *v = (void *) type->pbFormat; + dshowdebug(" rcSource: left %ld top %ld right %ld bottom %ld\n", + v->rcSource.left, v->rcSource.top, v->rcSource.right, v->rcSource.bottom); + dshowdebug(" rcTarget: left %ld top %ld right %ld bottom %ld\n", + v->rcTarget.left, v->rcTarget.top, v->rcTarget.right, v->rcTarget.bottom); + dshowdebug(" dwBitRate: %lu\n", v->dwBitRate); + dshowdebug(" dwBitErrorRate: %lu\n", v->dwBitErrorRate); + dshowdebug(" AvgTimePerFrame: %"PRId64"\n", v->AvgTimePerFrame); + dshowdebug(" dwInterlaceFlags: %lu\n", v->dwInterlaceFlags); + dshowdebug(" dwCopyProtectFlags: %lu\n", v->dwCopyProtectFlags); + dshowdebug(" dwPictAspectRatioX: %lu\n", v->dwPictAspectRatioX); + dshowdebug(" dwPictAspectRatioY: %lu\n", v->dwPictAspectRatioY); +// dshowdebug(" dwReserved1: %lu\n", v->u.dwReserved1); /* mingw-w64 is buggy and doesn't name unnamed unions */ + dshowdebug(" dwReserved2: %lu\n", v->dwReserved2); + dump_bih(NULL, &v->bmiHeader); + } else if (IsEqualGUID(&type->formattype, &FORMAT_WaveFormatEx)) { + WAVEFORMATEX *fx = (void *) type->pbFormat; + dshowdebug(" wFormatTag: %u\n", fx->wFormatTag); + dshowdebug(" nChannels: %u\n", fx->nChannels); + dshowdebug(" nSamplesPerSec: %lu\n", fx->nSamplesPerSec); + dshowdebug(" nAvgBytesPerSec: %lu\n", fx->nAvgBytesPerSec); + dshowdebug(" nBlockAlign: %u\n", fx->nBlockAlign); + dshowdebug(" wBitsPerSample: %u\n", fx->wBitsPerSample); + dshowdebug(" cbSize: %u\n", fx->cbSize); + } +#endif +} diff --git a/libavdevice/dshow_enummediatypes.c b/libavdevice/dshow_enummediatypes.c new file mode 100644 index 0000000..5b69a5b --- /dev/null +++ b/libavdevice/dshow_enummediatypes.c @@ -0,0 +1,103 @@ 
+/* + * DirectShow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "dshow_capture.h" + +DECLARE_QUERYINTERFACE(libAVEnumMediaTypes, + { {&IID_IUnknown,0}, {&IID_IEnumMediaTypes,0} }) +DECLARE_ADDREF(libAVEnumMediaTypes) +DECLARE_RELEASE(libAVEnumMediaTypes) + +long WINAPI +libAVEnumMediaTypes_Next(libAVEnumMediaTypes *this, unsigned long n, + AM_MEDIA_TYPE **types, unsigned long *fetched) +{ + int count = 0; + dshowdebug("libAVEnumMediaTypes_Next(%p)\n", this); + if (!types) + return E_POINTER; + if (!this->pos && n == 1) { + if (!IsEqualGUID(&this->type.majortype, &GUID_NULL)) { + AM_MEDIA_TYPE *type = av_malloc(sizeof(AM_MEDIA_TYPE)); + ff_copy_dshow_media_type(type, &this->type); + *types = type; + count = 1; + } + this->pos = 1; + } + if (fetched) + *fetched = count; + if (!count) + return S_FALSE; + return S_OK; +} +long WINAPI +libAVEnumMediaTypes_Skip(libAVEnumMediaTypes *this, unsigned long n) +{ + dshowdebug("libAVEnumMediaTypes_Skip(%p)\n", this); + if (n) /* Any skip will always fall outside of the only valid type. 
*/ + return S_FALSE; + return S_OK; +} +long WINAPI +libAVEnumMediaTypes_Reset(libAVEnumMediaTypes *this) +{ + dshowdebug("libAVEnumMediaTypes_Reset(%p)\n", this); + this->pos = 0; + return S_OK; +} +long WINAPI +libAVEnumMediaTypes_Clone(libAVEnumMediaTypes *this, libAVEnumMediaTypes **enums) +{ + libAVEnumMediaTypes *new; + dshowdebug("libAVEnumMediaTypes_Clone(%p)\n", this); + if (!enums) + return E_POINTER; + new = libAVEnumMediaTypes_Create(&this->type); + if (!new) + return E_OUTOFMEMORY; + new->pos = this->pos; + *enums = new; + return S_OK; +} + +static int +libAVEnumMediaTypes_Setup(libAVEnumMediaTypes *this, const AM_MEDIA_TYPE *type) +{ + IEnumMediaTypesVtbl *vtbl = this->vtbl; + SETVTBL(vtbl, libAVEnumMediaTypes, QueryInterface); + SETVTBL(vtbl, libAVEnumMediaTypes, AddRef); + SETVTBL(vtbl, libAVEnumMediaTypes, Release); + SETVTBL(vtbl, libAVEnumMediaTypes, Next); + SETVTBL(vtbl, libAVEnumMediaTypes, Skip); + SETVTBL(vtbl, libAVEnumMediaTypes, Reset); + SETVTBL(vtbl, libAVEnumMediaTypes, Clone); + + if (!type) { + this->type.majortype = GUID_NULL; + } else { + ff_copy_dshow_media_type(&this->type, type); + } + + return 1; +} +DECLARE_CREATE(libAVEnumMediaTypes, libAVEnumMediaTypes_Setup(this, type), const AM_MEDIA_TYPE *type) +DECLARE_DESTROY(libAVEnumMediaTypes, nothing) diff --git a/libavdevice/dshow_enumpins.c b/libavdevice/dshow_enumpins.c new file mode 100644 index 0000000..e5c11cb --- /dev/null +++ b/libavdevice/dshow_enumpins.c @@ -0,0 +1,105 @@ +/* + * DirectShow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "dshow_capture.h" + +DECLARE_QUERYINTERFACE(libAVEnumPins, + { {&IID_IUnknown,0}, {&IID_IEnumPins,0} }) +DECLARE_ADDREF(libAVEnumPins) +DECLARE_RELEASE(libAVEnumPins) + +long WINAPI +libAVEnumPins_Next(libAVEnumPins *this, unsigned long n, IPin **pins, + unsigned long *fetched) +{ + int count = 0; + dshowdebug("libAVEnumPins_Next(%p)\n", this); + if (!pins) + return E_POINTER; + if (!this->pos && n == 1) { + libAVPin_AddRef(this->pin); + *pins = (IPin *) this->pin; + count = 1; + this->pos = 1; + } + if (fetched) + *fetched = count; + if (!count) + return S_FALSE; + return S_OK; +} +long WINAPI +libAVEnumPins_Skip(libAVEnumPins *this, unsigned long n) +{ + dshowdebug("libAVEnumPins_Skip(%p)\n", this); + if (n) /* Any skip will always fall outside of the only valid pin. 
*/ + return S_FALSE; + return S_OK; +} +long WINAPI +libAVEnumPins_Reset(libAVEnumPins *this) +{ + dshowdebug("libAVEnumPins_Reset(%p)\n", this); + this->pos = 0; + return S_OK; +} +long WINAPI +libAVEnumPins_Clone(libAVEnumPins *this, libAVEnumPins **pins) +{ + libAVEnumPins *new; + dshowdebug("libAVEnumPins_Clone(%p)\n", this); + if (!pins) + return E_POINTER; + new = libAVEnumPins_Create(this->pin, this->filter); + if (!new) + return E_OUTOFMEMORY; + new->pos = this->pos; + *pins = new; + return S_OK; +} + +static int +libAVEnumPins_Setup(libAVEnumPins *this, libAVPin *pin, libAVFilter *filter) +{ + IEnumPinsVtbl *vtbl = this->vtbl; + SETVTBL(vtbl, libAVEnumPins, QueryInterface); + SETVTBL(vtbl, libAVEnumPins, AddRef); + SETVTBL(vtbl, libAVEnumPins, Release); + SETVTBL(vtbl, libAVEnumPins, Next); + SETVTBL(vtbl, libAVEnumPins, Skip); + SETVTBL(vtbl, libAVEnumPins, Reset); + SETVTBL(vtbl, libAVEnumPins, Clone); + + this->pin = pin; + this->filter = filter; + libAVFilter_AddRef(this->filter); + + return 1; +} +static int +libAVEnumPins_Cleanup(libAVEnumPins *this) +{ + libAVFilter_Release(this->filter); + return 1; +} +DECLARE_CREATE(libAVEnumPins, libAVEnumPins_Setup(this, pin, filter), + libAVPin *pin, libAVFilter *filter) +DECLARE_DESTROY(libAVEnumPins, libAVEnumPins_Cleanup) diff --git a/libavdevice/dshow_filter.c b/libavdevice/dshow_filter.c new file mode 100644 index 0000000..7360adc --- /dev/null +++ b/libavdevice/dshow_filter.c @@ -0,0 +1,202 @@ +/* + * DirectShow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "dshow_capture.h" + +DECLARE_QUERYINTERFACE(libAVFilter, + { {&IID_IUnknown,0}, {&IID_IBaseFilter,0} }) +DECLARE_ADDREF(libAVFilter) +DECLARE_RELEASE(libAVFilter) + +long WINAPI +libAVFilter_GetClassID(libAVFilter *this, CLSID *id) +{ + dshowdebug("libAVFilter_GetClassID(%p)\n", this); + /* I'm not creating a ClassID just for this. */ + return E_FAIL; +} +long WINAPI +libAVFilter_Stop(libAVFilter *this) +{ + dshowdebug("libAVFilter_Stop(%p)\n", this); + this->state = State_Stopped; + return S_OK; +} +long WINAPI +libAVFilter_Pause(libAVFilter *this) +{ + dshowdebug("libAVFilter_Pause(%p)\n", this); + this->state = State_Paused; + return S_OK; +} +long WINAPI +libAVFilter_Run(libAVFilter *this, REFERENCE_TIME start) +{ + dshowdebug("libAVFilter_Run(%p) %"PRId64"\n", this, start); + this->state = State_Running; + this->start_time = start; + return S_OK; +} +long WINAPI +libAVFilter_GetState(libAVFilter *this, DWORD ms, FILTER_STATE *state) +{ + dshowdebug("libAVFilter_GetState(%p)\n", this); + if (!state) + return E_POINTER; + *state = this->state; + return S_OK; +} +long WINAPI +libAVFilter_SetSyncSource(libAVFilter *this, IReferenceClock *clock) +{ + dshowdebug("libAVFilter_SetSyncSource(%p)\n", this); + + if (this->clock != clock) { + if (this->clock) + IReferenceClock_Release(this->clock); + this->clock = clock; + if (clock) + IReferenceClock_AddRef(clock); + } + + return S_OK; +} +long WINAPI +libAVFilter_GetSyncSource(libAVFilter *this, IReferenceClock **clock) +{ + 
dshowdebug("libAVFilter_GetSyncSource(%p)\n", this); + + if (!clock) + return E_POINTER; + if (this->clock) + IReferenceClock_AddRef(this->clock); + *clock = this->clock; + + return S_OK; +} +long WINAPI +libAVFilter_EnumPins(libAVFilter *this, IEnumPins **enumpin) +{ + libAVEnumPins *new; + dshowdebug("libAVFilter_EnumPins(%p)\n", this); + + if (!enumpin) + return E_POINTER; + new = libAVEnumPins_Create(this->pin, this); + if (!new) + return E_OUTOFMEMORY; + + *enumpin = (IEnumPins *) new; + return S_OK; +} +long WINAPI +libAVFilter_FindPin(libAVFilter *this, const wchar_t *id, IPin **pin) +{ + libAVPin *found = NULL; + dshowdebug("libAVFilter_FindPin(%p)\n", this); + + if (!id || !pin) + return E_POINTER; + if (!wcscmp(id, L"In")) { + found = this->pin; + libAVPin_AddRef(found); + } + *pin = (IPin *) found; + if (!found) + return VFW_E_NOT_FOUND; + + return S_OK; +} +long WINAPI +libAVFilter_QueryFilterInfo(libAVFilter *this, FILTER_INFO *info) +{ + dshowdebug("libAVFilter_QueryFilterInfo(%p)\n", this); + + if (!info) + return E_POINTER; + if (this->info.pGraph) + IFilterGraph_AddRef(this->info.pGraph); + *info = this->info; + + return S_OK; +} +long WINAPI +libAVFilter_JoinFilterGraph(libAVFilter *this, IFilterGraph *graph, + const wchar_t *name) +{ + dshowdebug("libAVFilter_JoinFilterGraph(%p)\n", this); + + this->info.pGraph = graph; + if (name) + wcscpy(this->info.achName, name); + + return S_OK; +} +long WINAPI +libAVFilter_QueryVendorInfo(libAVFilter *this, wchar_t **info) +{ + dshowdebug("libAVFilter_QueryVendorInfo(%p)\n", this); + + if (!info) + return E_POINTER; + *info = wcsdup(L"libAV"); + + return S_OK; +} + +static int +libAVFilter_Setup(libAVFilter *this, void *priv_data, void *callback, + enum dshowDeviceType type) +{ + IBaseFilterVtbl *vtbl = this->vtbl; + SETVTBL(vtbl, libAVFilter, QueryInterface); + SETVTBL(vtbl, libAVFilter, AddRef); + SETVTBL(vtbl, libAVFilter, Release); + SETVTBL(vtbl, libAVFilter, GetClassID); + SETVTBL(vtbl, libAVFilter, 
Stop); + SETVTBL(vtbl, libAVFilter, Pause); + SETVTBL(vtbl, libAVFilter, Run); + SETVTBL(vtbl, libAVFilter, GetState); + SETVTBL(vtbl, libAVFilter, SetSyncSource); + SETVTBL(vtbl, libAVFilter, GetSyncSource); + SETVTBL(vtbl, libAVFilter, EnumPins); + SETVTBL(vtbl, libAVFilter, FindPin); + SETVTBL(vtbl, libAVFilter, QueryFilterInfo); + SETVTBL(vtbl, libAVFilter, JoinFilterGraph); + SETVTBL(vtbl, libAVFilter, QueryVendorInfo); + + this->pin = libAVPin_Create(this); + + this->priv_data = priv_data; + this->callback = callback; + this->type = type; + + return 1; +} +static int +libAVFilter_Cleanup(libAVFilter *this) +{ + libAVPin_Release(this->pin); + return 1; +} +DECLARE_CREATE(libAVFilter, libAVFilter_Setup(this, priv_data, callback, type), + void *priv_data, void *callback, enum dshowDeviceType type) +DECLARE_DESTROY(libAVFilter, libAVFilter_Cleanup) diff --git a/libavdevice/dshow_pin.c b/libavdevice/dshow_pin.c new file mode 100644 index 0000000..1c0dca2 --- /dev/null +++ b/libavdevice/dshow_pin.c @@ -0,0 +1,362 @@ +/* + * DirectShow capture interface + * Copyright (c) 2010 Ramiro Polla + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "dshow_capture.h" + +#include <stddef.h> +#define imemoffset offsetof(libAVPin, imemvtbl) + +DECLARE_QUERYINTERFACE(libAVPin, + { {&IID_IUnknown,0}, {&IID_IPin,0}, {&IID_IMemInputPin,imemoffset} }) +DECLARE_ADDREF(libAVPin) +DECLARE_RELEASE(libAVPin) + +long WINAPI +libAVPin_Connect(libAVPin *this, IPin *pin, const AM_MEDIA_TYPE *type) +{ + dshowdebug("libAVPin_Connect(%p, %p, %p)\n", this, pin, type); + /* Input pins receive connections. */ + return S_FALSE; +} +long WINAPI +libAVPin_ReceiveConnection(libAVPin *this, IPin *pin, + const AM_MEDIA_TYPE *type) +{ + enum dshowDeviceType devtype = this->filter->type; + dshowdebug("libAVPin_ReceiveConnection(%p)\n", this); + + if (!pin) + return E_POINTER; + if (this->connectedto) + return VFW_E_ALREADY_CONNECTED; + + ff_print_AM_MEDIA_TYPE(type); + if (devtype == VideoDevice) { + if (!IsEqualGUID(&type->majortype, &MEDIATYPE_Video)) + return VFW_E_TYPE_NOT_ACCEPTED; + } else { + if (!IsEqualGUID(&type->majortype, &MEDIATYPE_Audio)) + return VFW_E_TYPE_NOT_ACCEPTED; + } + + IPin_AddRef(pin); + this->connectedto = pin; + + ff_copy_dshow_media_type(&this->type, type); + + return S_OK; +} +long WINAPI +libAVPin_Disconnect(libAVPin *this) +{ + dshowdebug("libAVPin_Disconnect(%p)\n", this); + + if (this->filter->state != State_Stopped) + return VFW_E_NOT_STOPPED; + if (!this->connectedto) + return S_FALSE; + IPin_Release(this->connectedto); + this->connectedto = NULL; + + return S_OK; +} +long WINAPI +libAVPin_ConnectedTo(libAVPin *this, IPin **pin) +{ + dshowdebug("libAVPin_ConnectedTo(%p)\n", this); + + if (!pin) + return E_POINTER; + if (!this->connectedto) + return VFW_E_NOT_CONNECTED; + IPin_AddRef(this->connectedto); + *pin = this->connectedto; + + return S_OK; +} +long WINAPI 
+libAVPin_ConnectionMediaType(libAVPin *this, AM_MEDIA_TYPE *type) +{ + dshowdebug("libAVPin_ConnectionMediaType(%p)\n", this); + + if (!type) + return E_POINTER; + if (!this->connectedto) + return VFW_E_NOT_CONNECTED; + + return ff_copy_dshow_media_type(type, &this->type); +} +long WINAPI +libAVPin_QueryPinInfo(libAVPin *this, PIN_INFO *info) +{ + dshowdebug("libAVPin_QueryPinInfo(%p)\n", this); + + if (!info) + return E_POINTER; + + if (this->filter) + libAVFilter_AddRef(this->filter); + + info->pFilter = (IBaseFilter *) this->filter; + info->dir = PINDIR_INPUT; + wcscpy(info->achName, L"Capture"); + + return S_OK; +} +long WINAPI +libAVPin_QueryDirection(libAVPin *this, PIN_DIRECTION *dir) +{ + dshowdebug("libAVPin_QueryDirection(%p)\n", this); + if (!dir) + return E_POINTER; + *dir = PINDIR_INPUT; + return S_OK; +} +long WINAPI +libAVPin_QueryId(libAVPin *this, wchar_t **id) +{ + dshowdebug("libAVPin_QueryId(%p)\n", this); + + if (!id) + return E_POINTER; + + *id = wcsdup(L"libAV Pin"); + + return S_OK; +} +long WINAPI +libAVPin_QueryAccept(libAVPin *this, const AM_MEDIA_TYPE *type) +{ + dshowdebug("libAVPin_QueryAccept(%p)\n", this); + return S_FALSE; +} +long WINAPI +libAVPin_EnumMediaTypes(libAVPin *this, IEnumMediaTypes **enumtypes) +{ + const AM_MEDIA_TYPE *type = NULL; + libAVEnumMediaTypes *new; + dshowdebug("libAVPin_EnumMediaTypes(%p)\n", this); + + if (!enumtypes) + return E_POINTER; + new = libAVEnumMediaTypes_Create(type); + if (!new) + return E_OUTOFMEMORY; + + *enumtypes = (IEnumMediaTypes *) new; + return S_OK; +} +long WINAPI +libAVPin_QueryInternalConnections(libAVPin *this, IPin **pin, + unsigned long *npin) +{ + dshowdebug("libAVPin_QueryInternalConnections(%p)\n", this); + return E_NOTIMPL; +} +long WINAPI +libAVPin_EndOfStream(libAVPin *this) +{ + dshowdebug("libAVPin_EndOfStream(%p)\n", this); + /* I don't care. 
*/ + return S_OK; +} +long WINAPI +libAVPin_BeginFlush(libAVPin *this) +{ + dshowdebug("libAVPin_BeginFlush(%p)\n", this); + /* I don't care. */ + return S_OK; +} +long WINAPI +libAVPin_EndFlush(libAVPin *this) +{ + dshowdebug("libAVPin_EndFlush(%p)\n", this); + /* I don't care. */ + return S_OK; +} +long WINAPI +libAVPin_NewSegment(libAVPin *this, REFERENCE_TIME start, REFERENCE_TIME stop, + double rate) +{ + dshowdebug("libAVPin_NewSegment(%p)\n", this); + /* I don't care. */ + return S_OK; +} + +static int +libAVPin_Setup(libAVPin *this, libAVFilter *filter) +{ + IPinVtbl *vtbl = this->vtbl; + IMemInputPinVtbl *imemvtbl; + + if (!filter) + return 0; + + imemvtbl = av_malloc(sizeof(IMemInputPinVtbl)); + if (!imemvtbl) + return 0; + + SETVTBL(imemvtbl, libAVMemInputPin, QueryInterface); + SETVTBL(imemvtbl, libAVMemInputPin, AddRef); + SETVTBL(imemvtbl, libAVMemInputPin, Release); + SETVTBL(imemvtbl, libAVMemInputPin, GetAllocator); + SETVTBL(imemvtbl, libAVMemInputPin, NotifyAllocator); + SETVTBL(imemvtbl, libAVMemInputPin, GetAllocatorRequirements); + SETVTBL(imemvtbl, libAVMemInputPin, Receive); + SETVTBL(imemvtbl, libAVMemInputPin, ReceiveMultiple); + SETVTBL(imemvtbl, libAVMemInputPin, ReceiveCanBlock); + + this->imemvtbl = imemvtbl; + + SETVTBL(vtbl, libAVPin, QueryInterface); + SETVTBL(vtbl, libAVPin, AddRef); + SETVTBL(vtbl, libAVPin, Release); + SETVTBL(vtbl, libAVPin, Connect); + SETVTBL(vtbl, libAVPin, ReceiveConnection); + SETVTBL(vtbl, libAVPin, Disconnect); + SETVTBL(vtbl, libAVPin, ConnectedTo); + SETVTBL(vtbl, libAVPin, ConnectionMediaType); + SETVTBL(vtbl, libAVPin, QueryPinInfo); + SETVTBL(vtbl, libAVPin, QueryDirection); + SETVTBL(vtbl, libAVPin, QueryId); + SETVTBL(vtbl, libAVPin, QueryAccept); + SETVTBL(vtbl, libAVPin, EnumMediaTypes); + SETVTBL(vtbl, libAVPin, QueryInternalConnections); + SETVTBL(vtbl, libAVPin, EndOfStream); + SETVTBL(vtbl, libAVPin, BeginFlush); + SETVTBL(vtbl, libAVPin, EndFlush); + SETVTBL(vtbl, libAVPin, NewSegment); + + 
this->filter = filter; + + return 1; +} +DECLARE_CREATE(libAVPin, libAVPin_Setup(this, filter), libAVFilter *filter) +DECLARE_DESTROY(libAVPin, nothing) + +/***************************************************************************** + * libAVMemInputPin + ****************************************************************************/ +long WINAPI +libAVMemInputPin_QueryInterface(libAVMemInputPin *this, const GUID *riid, + void **ppvObject) +{ + libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset); + dshowdebug("libAVMemInputPin_QueryInterface(%p)\n", this); + return libAVPin_QueryInterface(pin, riid, ppvObject); +} +unsigned long WINAPI +libAVMemInputPin_AddRef(libAVMemInputPin *this) +{ + libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset); + dshowdebug("libAVMemInputPin_AddRef(%p)\n", this); + return libAVPin_AddRef(pin); +} +unsigned long WINAPI +libAVMemInputPin_Release(libAVMemInputPin *this) +{ + libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset); + dshowdebug("libAVMemInputPin_Release(%p)\n", this); + return libAVPin_Release(pin); +} +long WINAPI +libAVMemInputPin_GetAllocator(libAVMemInputPin *this, IMemAllocator **alloc) +{ + dshowdebug("libAVMemInputPin_GetAllocator(%p)\n", this); + return VFW_E_NO_ALLOCATOR; +} +long WINAPI +libAVMemInputPin_NotifyAllocator(libAVMemInputPin *this, IMemAllocator *alloc, + BOOL rdwr) +{ + dshowdebug("libAVMemInputPin_NotifyAllocator(%p)\n", this); + return S_OK; +} +long WINAPI +libAVMemInputPin_GetAllocatorRequirements(libAVMemInputPin *this, + ALLOCATOR_PROPERTIES *props) +{ + dshowdebug("libAVMemInputPin_GetAllocatorRequirements(%p)\n", this); + return E_NOTIMPL; +} +long WINAPI +libAVMemInputPin_Receive(libAVMemInputPin *this, IMediaSample *sample) +{ + libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset); + enum dshowDeviceType devtype = pin->filter->type; + void *priv_data; + uint8_t *buf; + int buf_size; + int index; + int64_t curtime; + + 
dshowdebug("libAVMemInputPin_Receive(%p)\n", this); + + if (!sample) + return E_POINTER; + + if (devtype == VideoDevice) { + /* PTS from video devices is unreliable. */ + IReferenceClock *clock = pin->filter->clock; + IReferenceClock_GetTime(clock, &curtime); + } else { + int64_t dummy; + IMediaSample_GetTime(sample, &curtime, &dummy); + curtime += pin->filter->start_time; + } + + buf_size = IMediaSample_GetActualDataLength(sample); + IMediaSample_GetPointer(sample, &buf); + priv_data = pin->filter->priv_data; + index = pin->filter->stream_index; + + pin->filter->callback(priv_data, index, buf, buf_size, curtime, devtype); + + return S_OK; +} +long WINAPI +libAVMemInputPin_ReceiveMultiple(libAVMemInputPin *this, + IMediaSample **samples, long n, long *nproc) +{ + int i; + dshowdebug("libAVMemInputPin_ReceiveMultiple(%p)\n", this); + + for (i = 0; i < n; i++) + libAVMemInputPin_Receive(this, samples[i]); + + *nproc = n; + return S_OK; +} +long WINAPI +libAVMemInputPin_ReceiveCanBlock(libAVMemInputPin *this) +{ + dshowdebug("libAVMemInputPin_ReceiveCanBlock(%p)\n", this); + /* I swear I will not block. */ + return S_FALSE; +} + +void +libAVMemInputPin_Destroy(libAVMemInputPin *this) +{ + libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset); + dshowdebug("libAVMemInputPin_Destroy(%p)\n", this); + libAVPin_Destroy(pin); +} diff --git a/libavdevice/dv1394.c b/libavdevice/dv1394.c index d259e1a..c8241e3 100644 --- a/libavdevice/dv1394.c +++ b/libavdevice/dv1394.c @@ -2,20 +2,20 @@ * Linux DV1394 interface * Copyright (c) 2003 Max Krasnyansky <maxk@qualcomm.com> * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. 
* - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -30,7 +30,7 @@ #include "libavutil/internal.h" #include "libavutil/log.h" #include "libavutil/opt.h" -#include "libavformat/avformat.h" +#include "avdevice.h" #include "libavformat/dv.h" #include "dv1394.h" @@ -186,7 +186,7 @@ restart_poll: size = avpriv_dv_produce_packet(dv->dv_demux, pkt, dv->ring + (dv->index * DV1394_PAL_FRAME_SIZE), - DV1394_PAL_FRAME_SIZE); + DV1394_PAL_FRAME_SIZE, -1); dv->index = (dv->index + 1) % DV1394_RING_FRAMES; dv->done++; dv->avail--; @@ -224,6 +224,7 @@ static const AVClass dv1394_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, }; AVInputFormat ff_dv1394_demuxer = { diff --git a/libavdevice/dv1394.h b/libavdevice/dv1394.h index 9710ff5..b76d633 100644 --- a/libavdevice/dv1394.h +++ b/libavdevice/dv1394.h @@ -8,20 +8,20 @@ * Copyright (C)1999,2000 Sebastien Rougeaux <sebastien.rougeaux@anu.edu.au> * Peter Schlaile <udbz@rz.uni-karlsruhe.de> * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. 
* - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ diff --git a/libavdevice/fbdev_common.c b/libavdevice/fbdev_common.c new file mode 100644 index 0000000..634780d --- /dev/null +++ b/libavdevice/fbdev_common.c @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2011 Stefano Sabatini + * Copyright (c) 2009 Giliard B. de Freitas <giliarde@gmail.com> + * Copyright (C) 2002 Gunnar Monell <gmo@linux.nu> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <unistd.h> +#include <fcntl.h> +#include <sys/ioctl.h> +#include <stdlib.h> +#include "fbdev_common.h" +#include "libavutil/common.h" +#include "avdevice.h" + +struct rgb_pixfmt_map_entry { + int bits_per_pixel; + int red_offset, green_offset, blue_offset, alpha_offset; + enum AVPixelFormat pixfmt; +}; + +static const struct rgb_pixfmt_map_entry rgb_pixfmt_map[] = { + // bpp, red_offset, green_offset, blue_offset, alpha_offset, pixfmt + { 32, 0, 8, 16, 24, AV_PIX_FMT_RGBA }, + { 32, 16, 8, 0, 24, AV_PIX_FMT_BGRA }, + { 32, 8, 16, 24, 0, AV_PIX_FMT_ARGB }, + { 32, 3, 2, 8, 0, AV_PIX_FMT_ABGR }, + { 24, 0, 8, 16, 0, AV_PIX_FMT_RGB24 }, + { 24, 16, 8, 0, 0, AV_PIX_FMT_BGR24 }, + { 16, 11, 5, 0, 16, AV_PIX_FMT_RGB565 }, +}; + +enum AVPixelFormat ff_get_pixfmt_from_fb_varinfo(struct fb_var_screeninfo *varinfo) +{ + int i; + + for (i = 0; i < FF_ARRAY_ELEMS(rgb_pixfmt_map); i++) { + const struct rgb_pixfmt_map_entry *entry = &rgb_pixfmt_map[i]; + if (entry->bits_per_pixel == varinfo->bits_per_pixel && + entry->red_offset == varinfo->red.offset && + entry->green_offset == varinfo->green.offset && + entry->blue_offset == varinfo->blue.offset) + return entry->pixfmt; + } + + return AV_PIX_FMT_NONE; +} + +const char* ff_fbdev_default_device() +{ + const char *dev = getenv("FRAMEBUFFER"); + if (!dev) + dev = "/dev/fb0"; + return dev; +} + +int ff_fbdev_get_device_list(AVDeviceInfoList *device_list) +{ + struct fb_var_screeninfo varinfo; + struct fb_fix_screeninfo fixinfo; + char device_file[12]; + AVDeviceInfo *device = NULL; + int i, fd, ret = 0; + const char *default_device = ff_fbdev_default_device(); + + if (!device_list) + return AVERROR(EINVAL); + + for (i = 0; i <= 31; i++) { + snprintf(device_file, sizeof(device_file), "/dev/fb%d", i); 
+ + if ((fd = avpriv_open(device_file, O_RDWR)) < 0) + continue; + if (ioctl(fd, FBIOGET_VSCREENINFO, &varinfo) == -1) + goto fail_device; + if (ioctl(fd, FBIOGET_FSCREENINFO, &fixinfo) == -1) + goto fail_device; + + device = av_mallocz(sizeof(AVDeviceInfo)); + if (!device) { + ret = AVERROR(ENOMEM); + goto fail_device; + } + device->device_name = av_strdup(device_file); + device->device_description = av_strdup(fixinfo.id); + if (!device->device_name || !device->device_description) { + ret = AVERROR(ENOMEM); + goto fail_device; + } + + if ((ret = av_dynarray_add_nofree(&device_list->devices, + &device_list->nb_devices, device)) < 0) + goto fail_device; + + if (default_device && !strcmp(device->device_name, default_device)) { + device_list->default_device = device_list->nb_devices - 1; + default_device = NULL; + } + close(fd); + continue; + + fail_device: + if (device) { + av_free(device->device_name); + av_free(device->device_description); + av_freep(&device); + } + if (fd >= 0) + close(fd); + if (ret < 0) + return ret; + } + return 0; +} diff --git a/libavdevice/fbdev_common.h b/libavdevice/fbdev_common.h new file mode 100644 index 0000000..7b81a8d --- /dev/null +++ b/libavdevice/fbdev_common.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2011 Stefano Sabatini + * Copyright (c) 2009 Giliard B. de Freitas <giliarde@gmail.com> + * Copyright (C) 2002 Gunnar Monell <gmo@linux.nu> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVDEVICE_FBDEV_COMMON_H +#define AVDEVICE_FBDEV_COMMON_H + +#include <features.h> +#include <linux/fb.h> +#include "libavutil/pixfmt.h" + +struct AVDeviceInfoList; + +enum AVPixelFormat ff_get_pixfmt_from_fb_varinfo(struct fb_var_screeninfo *varinfo); + +const char* ff_fbdev_default_device(void); + +int ff_fbdev_get_device_list(struct AVDeviceInfoList *device_list); + +#endif /* AVDEVICE_FBDEV_COMMON_H */ diff --git a/libavdevice/fbdev.c b/libavdevice/fbdev_dec.c index 34e09f9..1593b10 100644 --- a/libavdevice/fbdev.c +++ b/libavdevice/fbdev_dec.c @@ -3,20 +3,20 @@ * Copyright (c) 2009 Giliard B. de Freitas <giliarde@gmail.com> * Copyright (C) 2002 Gunnar Monell <gmo@linux.nu> * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -41,46 +41,14 @@ #include "libavutil/time.h" #include "libavutil/parseutils.h" #include "libavutil/pixdesc.h" -#include "libavformat/avformat.h" #include "libavformat/internal.h" - -struct rgb_pixfmt_map_entry { - int bits_per_pixel; - int red_offset, green_offset, blue_offset, alpha_offset; - enum AVPixelFormat pixfmt; -}; - -static struct rgb_pixfmt_map_entry rgb_pixfmt_map[] = { - // bpp, red_offset, green_offset, blue_offset, alpha_offset, pixfmt - { 32, 0, 8, 16, 24, AV_PIX_FMT_RGBA }, - { 32, 16, 8, 0, 24, AV_PIX_FMT_BGRA }, - { 32, 8, 16, 24, 0, AV_PIX_FMT_ARGB }, - { 32, 3, 2, 8, 0, AV_PIX_FMT_ABGR }, - { 24, 0, 8, 16, 0, AV_PIX_FMT_RGB24 }, - { 24, 16, 8, 0, 0, AV_PIX_FMT_BGR24 }, -}; - -static enum AVPixelFormat get_pixfmt_from_fb_varinfo(struct fb_var_screeninfo *varinfo) -{ - int i; - - for (i = 0; i < FF_ARRAY_ELEMS(rgb_pixfmt_map); i++) { - struct rgb_pixfmt_map_entry *entry = &rgb_pixfmt_map[i]; - if (entry->bits_per_pixel == varinfo->bits_per_pixel && - entry->red_offset == varinfo->red.offset && - entry->green_offset == varinfo->green.offset && - entry->blue_offset == varinfo->blue.offset) - return entry->pixfmt; - } - - return AV_PIX_FMT_NONE; -} +#include "avdevice.h" +#include "fbdev_common.h" typedef struct FBDevContext { AVClass *class; ///< class for private options int frame_size; ///< size in bytes of a grabbed frame AVRational framerate_q; ///< framerate - char *framerate; ///< framerate string set by a private option int64_t time_frame; ///< time for the next frame to output (in 1/1000000 units) int fd; ///< framebuffer device file descriptor @@ -101,12 +69,6 @@ static av_cold int fbdev_read_header(AVFormatContext *avctx) enum AVPixelFormat pix_fmt; int ret, 
flags = O_RDONLY; - ret = av_parse_video_rate(&fbdev->framerate_q, fbdev->framerate); - if (ret < 0) { - av_log(avctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", fbdev->framerate); - return ret; - } - if (!(st = avformat_new_stream(avctx, NULL))) return AVERROR(ENOMEM); avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in microseconds */ @@ -119,25 +81,25 @@ static av_cold int fbdev_read_header(AVFormatContext *avctx) ret = AVERROR(errno); av_log(avctx, AV_LOG_ERROR, "Could not open framebuffer device '%s': %s\n", - avctx->filename, strerror(ret)); + avctx->filename, av_err2str(ret)); return ret; } if (ioctl(fbdev->fd, FBIOGET_VSCREENINFO, &fbdev->varinfo) < 0) { ret = AVERROR(errno); av_log(avctx, AV_LOG_ERROR, - "FBIOGET_VSCREENINFO: %s\n", strerror(errno)); + "FBIOGET_VSCREENINFO: %s\n", av_err2str(ret)); goto fail; } if (ioctl(fbdev->fd, FBIOGET_FSCREENINFO, &fbdev->fixinfo) < 0) { ret = AVERROR(errno); av_log(avctx, AV_LOG_ERROR, - "FBIOGET_FSCREENINFO: %s\n", strerror(errno)); + "FBIOGET_FSCREENINFO: %s\n", av_err2str(ret)); goto fail; } - pix_fmt = get_pixfmt_from_fb_varinfo(&fbdev->varinfo); + pix_fmt = ff_get_pixfmt_from_fb_varinfo(&fbdev->varinfo); if (pix_fmt == AV_PIX_FMT_NONE) { ret = AVERROR(EINVAL); av_log(avctx, AV_LOG_ERROR, @@ -154,7 +116,7 @@ static av_cold int fbdev_read_header(AVFormatContext *avctx) fbdev->data = mmap(NULL, fbdev->fixinfo.smem_len, PROT_READ, MAP_SHARED, fbdev->fd, 0); if (fbdev->data == MAP_FAILED) { ret = AVERROR(errno); - av_log(avctx, AV_LOG_ERROR, "Error in mmap(): %s\n", strerror(errno)); + av_log(avctx, AV_LOG_ERROR, "Error in mmap(): %s\n", av_err2str(ret)); goto fail; } @@ -163,7 +125,7 @@ static av_cold int fbdev_read_header(AVFormatContext *avctx) st->codec->width = fbdev->width; st->codec->height = fbdev->height; st->codec->pix_fmt = pix_fmt; - st->codec->time_base = (AVRational){fbdev->framerate_q.den, fbdev->framerate_q.num}; + st->codec->time_base = av_inv_q(fbdev->framerate_q); st->codec->bit_rate 
= fbdev->width * fbdev->height * fbdev->bytes_per_pixel * av_q2d(fbdev->framerate_q) * 8; @@ -192,20 +154,22 @@ static int fbdev_read_packet(AVFormatContext *avctx, AVPacket *pkt) fbdev->time_frame = av_gettime(); /* wait based on the frame rate */ - curtime = av_gettime(); - delay = fbdev->time_frame - curtime; - av_dlog(avctx, - "time_frame:%"PRId64" curtime:%"PRId64" delay:%"PRId64"\n", - fbdev->time_frame, curtime, delay); - if (delay > 0) { + while (1) { + curtime = av_gettime(); + delay = fbdev->time_frame - curtime; + av_dlog(avctx, + "time_frame:%"PRId64" curtime:%"PRId64" delay:%"PRId64"\n", + fbdev->time_frame, curtime, delay); + if (delay <= 0) { + fbdev->time_frame += INT64_C(1000000) / av_q2d(fbdev->framerate_q); + break; + } if (avctx->flags & AVFMT_FLAG_NONBLOCK) return AVERROR(EAGAIN); ts.tv_sec = delay / 1000000; ts.tv_nsec = (delay % 1000000) * 1000; while (nanosleep(&ts, &ts) < 0 && errno == EINTR); } - /* compute the time of the next frame */ - fbdev->time_frame += INT64_C(1000000) / av_q2d(fbdev->framerate_q); if ((ret = av_new_packet(pkt, fbdev->frame_size)) < 0) return ret; @@ -213,7 +177,7 @@ static int fbdev_read_packet(AVFormatContext *avctx, AVPacket *pkt) /* refresh fbdev->varinfo, visible data position may change at each call */ if (ioctl(fbdev->fd, FBIOGET_VSCREENINFO, &fbdev->varinfo) < 0) av_log(avctx, AV_LOG_WARNING, - "Error refreshing variable info: %s\n", strerror(errno)); + "Error refreshing variable info: %s\n", av_err2str(ret)); pkt->pts = curtime; @@ -222,7 +186,6 @@ static int fbdev_read_packet(AVFormatContext *avctx, AVPacket *pkt) fbdev->varinfo.yoffset * fbdev->fixinfo.line_length; pout = pkt->data; - // TODO it'd be nice if the lines were aligned for (i = 0; i < fbdev->height; i++) { memcpy(pout, pin, fbdev->frame_linesize); pin += fbdev->fixinfo.line_length; @@ -236,16 +199,21 @@ static av_cold int fbdev_read_close(AVFormatContext *avctx) { FBDevContext *fbdev = avctx->priv_data; - munmap(fbdev->data, 
fbdev->frame_size); + munmap(fbdev->data, fbdev->fixinfo.smem_len); close(fbdev->fd); return 0; } +static int fbdev_get_device_list(AVFormatContext *s, AVDeviceInfoList *device_list) +{ + return ff_fbdev_get_device_list(device_list); +} + #define OFFSET(x) offsetof(FBDevContext, x) #define DEC AV_OPT_FLAG_DECODING_PARAM static const AVOption options[] = { - { "framerate","", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = "25"}, 0, 0, DEC }, + { "framerate","", OFFSET(framerate_q), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, 0, DEC }, { NULL }, }; @@ -254,6 +222,7 @@ static const AVClass fbdev_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, }; AVInputFormat ff_fbdev_demuxer = { @@ -263,6 +232,7 @@ AVInputFormat ff_fbdev_demuxer = { .read_header = fbdev_read_header, .read_packet = fbdev_read_packet, .read_close = fbdev_read_close, + .get_device_list = fbdev_get_device_list, .flags = AVFMT_NOFILE, .priv_class = &fbdev_class, }; diff --git a/libavdevice/fbdev_enc.c b/libavdevice/fbdev_enc.c new file mode 100644 index 0000000..28efc71 --- /dev/null +++ b/libavdevice/fbdev_enc.c @@ -0,0 +1,220 @@ +/* + * Copyright (c) 2013 Lukasz Marek + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <unistd.h> +#include <fcntl.h> +#include <sys/ioctl.h> +#include <sys/mman.h> +#include <linux/fb.h> +#include "libavutil/pixdesc.h" +#include "libavutil/log.h" +#include "libavutil/mem.h" +#include "libavutil/opt.h" +#include "libavformat/avformat.h" +#include "fbdev_common.h" +#include "avdevice.h" + +typedef struct { + AVClass *class; ///< class for private options + int xoffset; ///< x coordinate of top left corner + int yoffset; ///< y coordinate of top left corner + struct fb_var_screeninfo varinfo; ///< framebuffer variable info + struct fb_fix_screeninfo fixinfo; ///< framebuffer fixed info + int fd; ///< framebuffer device file descriptor + uint8_t *data; ///< framebuffer data +} FBDevContext; + +static av_cold int fbdev_write_header(AVFormatContext *h) +{ + FBDevContext *fbdev = h->priv_data; + enum AVPixelFormat pix_fmt; + int ret, flags = O_RDWR; + const char* device; + + if (h->nb_streams != 1 || h->streams[0]->codec->codec_type != AVMEDIA_TYPE_VIDEO) { + av_log(fbdev, AV_LOG_ERROR, "Only a single video stream is supported.\n"); + return AVERROR(EINVAL); + } + + if (h->filename[0]) + device = h->filename; + else + device = ff_fbdev_default_device(); + + if ((fbdev->fd = avpriv_open(device, flags)) == -1) { + ret = AVERROR(errno); + av_log(h, AV_LOG_ERROR, + "Could not open framebuffer device '%s': %s\n", + device, av_err2str(ret)); + return ret; + } + + if (ioctl(fbdev->fd, FBIOGET_VSCREENINFO, &fbdev->varinfo) < 0) { + ret = AVERROR(errno); + av_log(h, AV_LOG_ERROR, "FBIOGET_VSCREENINFO: %s\n", av_err2str(ret)); + goto fail; + } + + if (ioctl(fbdev->fd, FBIOGET_FSCREENINFO, &fbdev->fixinfo) < 0) { + ret = AVERROR(errno); + av_log(h, AV_LOG_ERROR, "FBIOGET_FSCREENINFO: %s\n", av_err2str(ret)); + goto fail; + } + + 
pix_fmt = ff_get_pixfmt_from_fb_varinfo(&fbdev->varinfo); + if (pix_fmt == AV_PIX_FMT_NONE) { + ret = AVERROR(EINVAL); + av_log(h, AV_LOG_ERROR, "Framebuffer pixel format not supported.\n"); + goto fail; + } + + fbdev->data = mmap(NULL, fbdev->fixinfo.smem_len, PROT_WRITE, MAP_SHARED, fbdev->fd, 0); + if (fbdev->data == MAP_FAILED) { + ret = AVERROR(errno); + av_log(h, AV_LOG_ERROR, "Error in mmap(): %s\n", av_err2str(ret)); + goto fail; + } + + return 0; + fail: + close(fbdev->fd); + return ret; +} + +static int fbdev_write_packet(AVFormatContext *h, AVPacket *pkt) +{ + FBDevContext *fbdev = h->priv_data; + uint8_t *pin, *pout; + enum AVPixelFormat fb_pix_fmt; + int disp_height; + int bytes_to_copy; + AVCodecContext *codec_ctx = h->streams[0]->codec; + enum AVPixelFormat video_pix_fmt = codec_ctx->pix_fmt; + int video_width = codec_ctx->width; + int video_height = codec_ctx->height; + int bytes_per_pixel = ((codec_ctx->bits_per_coded_sample + 7) >> 3); + int src_line_size = video_width * bytes_per_pixel; + int i; + + if (ioctl(fbdev->fd, FBIOGET_VSCREENINFO, &fbdev->varinfo) < 0) + av_log(h, AV_LOG_WARNING, + "Error refreshing variable info: %s\n", av_err2str(AVERROR(errno))); + + fb_pix_fmt = ff_get_pixfmt_from_fb_varinfo(&fbdev->varinfo); + + if (fb_pix_fmt != video_pix_fmt) { + av_log(h, AV_LOG_ERROR, "Pixel format %s is not supported, use %s\n", + av_get_pix_fmt_name(video_pix_fmt), av_get_pix_fmt_name(fb_pix_fmt)); + return AVERROR(EINVAL); + } + + disp_height = FFMIN(fbdev->varinfo.yres, video_height); + bytes_to_copy = FFMIN(fbdev->varinfo.xres, video_width) * bytes_per_pixel; + + pin = pkt->data; + pout = fbdev->data + + bytes_per_pixel * fbdev->varinfo.xoffset + + fbdev->varinfo.yoffset * fbdev->fixinfo.line_length; + + if (fbdev->xoffset) { + if (fbdev->xoffset < 0) { + if (-fbdev->xoffset >= video_width) //nothing to display + return 0; + bytes_to_copy += fbdev->xoffset * bytes_per_pixel; + pin -= fbdev->xoffset * bytes_per_pixel; + } else { + int diff 
= (video_width + fbdev->xoffset) - fbdev->varinfo.xres; + if (diff > 0) { + if (diff >= video_width) //nothing to display + return 0; + bytes_to_copy -= diff * bytes_per_pixel; + } + pout += bytes_per_pixel * fbdev->xoffset; + } + } + + if (fbdev->yoffset) { + if (fbdev->yoffset < 0) { + if (-fbdev->yoffset >= video_height) //nothing to display + return 0; + disp_height += fbdev->yoffset; + pin -= fbdev->yoffset * src_line_size; + } else { + int diff = (video_height + fbdev->yoffset) - fbdev->varinfo.yres; + if (diff > 0) { + if (diff >= video_height) //nothing to display + return 0; + disp_height -= diff; + } + pout += fbdev->yoffset * fbdev->fixinfo.line_length; + } + } + + for (i = 0; i < disp_height; i++) { + memcpy(pout, pin, bytes_to_copy); + pout += fbdev->fixinfo.line_length; + pin += src_line_size; + } + + return 0; +} + +static av_cold int fbdev_write_trailer(AVFormatContext *h) +{ + FBDevContext *fbdev = h->priv_data; + munmap(fbdev->data, fbdev->fixinfo.smem_len); + close(fbdev->fd); + return 0; +} + +static int fbdev_get_device_list(AVFormatContext *s, AVDeviceInfoList *device_list) +{ + return ff_fbdev_get_device_list(device_list); +} + +#define OFFSET(x) offsetof(FBDevContext, x) +#define ENC AV_OPT_FLAG_ENCODING_PARAM +static const AVOption options[] = { + { "xoffset", "set x coordinate of top left corner", OFFSET(xoffset), AV_OPT_TYPE_INT, {.i64 = 0}, INT_MIN, INT_MAX, ENC }, + { "yoffset", "set y coordinate of top left corner", OFFSET(yoffset), AV_OPT_TYPE_INT, {.i64 = 0}, INT_MIN, INT_MAX, ENC }, + { NULL } +}; + +static const AVClass fbdev_class = { + .class_name = "fbdev outdev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_fbdev_muxer = { + .name = "fbdev", + .long_name = NULL_IF_CONFIG_SMALL("Linux framebuffer"), + .priv_data_size = sizeof(FBDevContext), + .audio_codec = AV_CODEC_ID_NONE, + .video_codec = 
AV_CODEC_ID_RAWVIDEO, + .write_header = fbdev_write_header, + .write_packet = fbdev_write_packet, + .write_trailer = fbdev_write_trailer, + .get_device_list = fbdev_get_device_list, + .flags = AVFMT_NOFILE | AVFMT_VARIABLE_FPS | AVFMT_NOTIMESTAMPS, + .priv_class = &fbdev_class, +}; diff --git a/libavdevice/gdigrab.c b/libavdevice/gdigrab.c new file mode 100644 index 0000000..9a185d4 --- /dev/null +++ b/libavdevice/gdigrab.c @@ -0,0 +1,636 @@ +/* + * GDI video grab interface + * + * This file is part of FFmpeg. + * + * Copyright (C) 2013 Calvin Walton <calvin.walton@kepstin.ca> + * Copyright (C) 2007-2010 Christophe Gisquet <word1.word2@gmail.com> + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2.1 + * of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * GDI frame device demuxer + * @author Calvin Walton <calvin.walton@kepstin.ca> + * @author Christophe Gisquet <word1.word2@gmail.com> + */ + +#include "config.h" +#include "libavformat/internal.h" +#include "libavutil/opt.h" +#include "libavutil/time.h" +#include <windows.h> + +/** + * GDI Device Demuxer context + */ +struct gdigrab { + const AVClass *class; /**< Class for private options */ + + int frame_size; /**< Size in bytes of the frame pixel data */ + int header_size; /**< Size in bytes of the DIB header */ + AVRational time_base; /**< Time base */ + int64_t time_frame; /**< Current time */ + + int draw_mouse; /**< Draw mouse cursor (private option) */ + int show_region; /**< Draw border (private option) */ + AVRational framerate; /**< Capture framerate (private option) */ + int width; /**< Width of the grab frame (private option) */ + int height; /**< Height of the grab frame (private option) */ + int offset_x; /**< Capture x offset (private option) */ + int offset_y; /**< Capture y offset (private option) */ + + HWND hwnd; /**< Handle of the window for the grab */ + HDC source_hdc; /**< Source device context */ + HDC dest_hdc; /**< Destination, source-compatible DC */ + BITMAPINFO bmi; /**< Information describing DIB format */ + HBITMAP hbmp; /**< Information on the bitmap captured */ + void *buffer; /**< The buffer containing the bitmap image data */ + RECT clip_rect; /**< The subarea of the screen or window to clip */ + + HWND region_hwnd; /**< Handle of the region border window */ + + int cursor_error_printed; +}; + +#define WIN32_API_ERROR(str) \ + av_log(s1, AV_LOG_ERROR, str " (error %li)\n", GetLastError()) + +#define REGION_WND_BORDER 3 + +/** + * Callback to handle Windows messages for the region outline 
window. + * + * In particular, this handles painting the frame rectangle. + * + * @param hwnd The region outline window handle. + * @param msg The Windows message. + * @param wparam First Windows message parameter. + * @param lparam Second Windows message parameter. + * @return 0 success, !0 failure + */ +static LRESULT CALLBACK +gdigrab_region_wnd_proc(HWND hwnd, UINT msg, WPARAM wparam, LPARAM lparam) +{ + PAINTSTRUCT ps; + HDC hdc; + RECT rect; + + switch (msg) { + case WM_PAINT: + hdc = BeginPaint(hwnd, &ps); + + GetClientRect(hwnd, &rect); + FrameRect(hdc, &rect, GetStockObject(BLACK_BRUSH)); + + rect.left++; rect.top++; rect.right--; rect.bottom--; + FrameRect(hdc, &rect, GetStockObject(WHITE_BRUSH)); + + rect.left++; rect.top++; rect.right--; rect.bottom--; + FrameRect(hdc, &rect, GetStockObject(BLACK_BRUSH)); + + EndPaint(hwnd, &ps); + break; + default: + return DefWindowProc(hwnd, msg, wparam, lparam); + } + return 0; +} + +/** + * Initialize the region outline window. + * + * @param s1 The format context. + * @param gdigrab gdigrab context. 
+ * @return 0 success, !0 failure + */ +static int +gdigrab_region_wnd_init(AVFormatContext *s1, struct gdigrab *gdigrab) +{ + HWND hwnd; + RECT rect = gdigrab->clip_rect; + HRGN region = NULL; + HRGN region_interior = NULL; + + DWORD style = WS_POPUP | WS_VISIBLE; + DWORD ex = WS_EX_TOOLWINDOW | WS_EX_TOPMOST | WS_EX_TRANSPARENT; + + rect.left -= REGION_WND_BORDER; rect.top -= REGION_WND_BORDER; + rect.right += REGION_WND_BORDER; rect.bottom += REGION_WND_BORDER; + + AdjustWindowRectEx(&rect, style, FALSE, ex); + + // Create a window with no owner; use WC_DIALOG instead of writing a custom + // window class + hwnd = CreateWindowEx(ex, WC_DIALOG, NULL, style, rect.left, rect.top, + rect.right - rect.left, rect.bottom - rect.top, + NULL, NULL, NULL, NULL); + if (!hwnd) { + WIN32_API_ERROR("Could not create region display window"); + goto error; + } + + // Set the window shape to only include the border area + GetClientRect(hwnd, &rect); + region = CreateRectRgn(0, 0, + rect.right - rect.left, rect.bottom - rect.top); + region_interior = CreateRectRgn(REGION_WND_BORDER, REGION_WND_BORDER, + rect.right - rect.left - REGION_WND_BORDER, + rect.bottom - rect.top - REGION_WND_BORDER); + CombineRgn(region, region, region_interior, RGN_DIFF); + if (!SetWindowRgn(hwnd, region, FALSE)) { + WIN32_API_ERROR("Could not set window region"); + goto error; + } + // The "region" memory is now owned by the window + region = NULL; + DeleteObject(region_interior); + + SetWindowLongPtr(hwnd, GWLP_WNDPROC, (LONG_PTR) gdigrab_region_wnd_proc); + + ShowWindow(hwnd, SW_SHOW); + + gdigrab->region_hwnd = hwnd; + + return 0; + +error: + if (region) + DeleteObject(region); + if (region_interior) + DeleteObject(region_interior); + if (hwnd) + DestroyWindow(hwnd); + return 1; +} + +/** + * Cleanup/free the region outline window. + * + * @param s1 The format context. + * @param gdigrab gdigrab context. 
+ */ +static void +gdigrab_region_wnd_destroy(AVFormatContext *s1, struct gdigrab *gdigrab) +{ + if (gdigrab->region_hwnd) + DestroyWindow(gdigrab->region_hwnd); + gdigrab->region_hwnd = NULL; +} + +/** + * Process the Windows message queue. + * + * This is important to prevent Windows from thinking the window has become + * unresponsive. As well, things like WM_PAINT (to actually draw the window + * contents) are handled from the message queue context. + * + * @param s1 The format context. + * @param gdigrab gdigrab context. + */ +static void +gdigrab_region_wnd_update(AVFormatContext *s1, struct gdigrab *gdigrab) +{ + HWND hwnd = gdigrab->region_hwnd; + MSG msg; + + while (PeekMessage(&msg, hwnd, 0, 0, PM_REMOVE)) { + DispatchMessage(&msg); + } +} + +/** + * Initializes the gdi grab device demuxer (public device demuxer API). + * + * @param s1 Context from avformat core + * @return AVERROR_IO error, 0 success + */ +static int +gdigrab_read_header(AVFormatContext *s1) +{ + struct gdigrab *gdigrab = s1->priv_data; + + HWND hwnd; + HDC source_hdc = NULL; + HDC dest_hdc = NULL; + BITMAPINFO bmi; + HBITMAP hbmp = NULL; + void *buffer = NULL; + + const char *filename = s1->filename; + const char *name = NULL; + AVStream *st = NULL; + + int bpp; + RECT virtual_rect; + RECT clip_rect; + BITMAP bmp; + int ret; + + if (!strncmp(filename, "title=", 6)) { + name = filename + 6; + hwnd = FindWindow(NULL, name); + if (!hwnd) { + av_log(s1, AV_LOG_ERROR, + "Can't find window '%s', aborting.\n", name); + ret = AVERROR(EIO); + goto error; + } + if (gdigrab->show_region) { + av_log(s1, AV_LOG_WARNING, + "Can't show region when grabbing a window.\n"); + gdigrab->show_region = 0; + } + } else if (!strcmp(filename, "desktop")) { + hwnd = NULL; + } else { + av_log(s1, AV_LOG_ERROR, + "Please use \"desktop\" or \"title=<windowname>\" to specify your target.\n"); + ret = AVERROR(EIO); + goto error; + } + + if (hwnd) { + GetClientRect(hwnd, &virtual_rect); + } else { + virtual_rect.left 
= GetSystemMetrics(SM_XVIRTUALSCREEN); + virtual_rect.top = GetSystemMetrics(SM_YVIRTUALSCREEN); + virtual_rect.right = virtual_rect.left + GetSystemMetrics(SM_CXVIRTUALSCREEN); + virtual_rect.bottom = virtual_rect.top + GetSystemMetrics(SM_CYVIRTUALSCREEN); + } + + /* If no width or height set, use full screen/window area */ + if (!gdigrab->width || !gdigrab->height) { + clip_rect.left = virtual_rect.left; + clip_rect.top = virtual_rect.top; + clip_rect.right = virtual_rect.right; + clip_rect.bottom = virtual_rect.bottom; + } else { + clip_rect.left = gdigrab->offset_x; + clip_rect.top = gdigrab->offset_y; + clip_rect.right = gdigrab->width + gdigrab->offset_x; + clip_rect.bottom = gdigrab->height + gdigrab->offset_y; + } + + if (clip_rect.left < virtual_rect.left || + clip_rect.top < virtual_rect.top || + clip_rect.right > virtual_rect.right || + clip_rect.bottom > virtual_rect.bottom) { + av_log(s1, AV_LOG_ERROR, + "Capture area (%li,%li),(%li,%li) extends outside window area (%li,%li),(%li,%li)", + clip_rect.left, clip_rect.top, + clip_rect.right, clip_rect.bottom, + virtual_rect.left, virtual_rect.top, + virtual_rect.right, virtual_rect.bottom); + ret = AVERROR(EIO); + goto error; + } + + /* This will get the device context for the selected window, or if + * none, the primary screen */ + source_hdc = GetDC(hwnd); + if (!source_hdc) { + WIN32_API_ERROR("Couldn't get window device context"); + ret = AVERROR(EIO); + goto error; + } + bpp = GetDeviceCaps(source_hdc, BITSPIXEL); + + if (name) { + av_log(s1, AV_LOG_INFO, + "Found window %s, capturing %lix%lix%i at (%li,%li)\n", + name, + clip_rect.right - clip_rect.left, + clip_rect.bottom - clip_rect.top, + bpp, clip_rect.left, clip_rect.top); + } else { + av_log(s1, AV_LOG_INFO, + "Capturing whole desktop as %lix%lix%i at (%li,%li)\n", + clip_rect.right - clip_rect.left, + clip_rect.bottom - clip_rect.top, + bpp, clip_rect.left, clip_rect.top); + } + + if (clip_rect.right - clip_rect.left <= 0 || + 
clip_rect.bottom - clip_rect.top <= 0 || bpp%8) { + av_log(s1, AV_LOG_ERROR, "Invalid properties, aborting\n"); + ret = AVERROR(EIO); + goto error; + } + + dest_hdc = CreateCompatibleDC(source_hdc); + if (!dest_hdc) { + WIN32_API_ERROR("Screen DC CreateCompatibleDC"); + ret = AVERROR(EIO); + goto error; + } + + /* Create a DIB and select it into the dest_hdc */ + bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + bmi.bmiHeader.biWidth = clip_rect.right - clip_rect.left; + bmi.bmiHeader.biHeight = -(clip_rect.bottom - clip_rect.top); + bmi.bmiHeader.biPlanes = 1; + bmi.bmiHeader.biBitCount = bpp; + bmi.bmiHeader.biCompression = BI_RGB; + bmi.bmiHeader.biSizeImage = 0; + bmi.bmiHeader.biXPelsPerMeter = 0; + bmi.bmiHeader.biYPelsPerMeter = 0; + bmi.bmiHeader.biClrUsed = 0; + bmi.bmiHeader.biClrImportant = 0; + hbmp = CreateDIBSection(dest_hdc, &bmi, DIB_RGB_COLORS, + &buffer, NULL, 0); + if (!hbmp) { + WIN32_API_ERROR("Creating DIB Section"); + ret = AVERROR(EIO); + goto error; + } + + if (!SelectObject(dest_hdc, hbmp)) { + WIN32_API_ERROR("SelectObject"); + ret = AVERROR(EIO); + goto error; + } + + /* Get info from the bitmap */ + GetObject(hbmp, sizeof(BITMAP), &bmp); + + st = avformat_new_stream(s1, NULL); + if (!st) { + ret = AVERROR(ENOMEM); + goto error; + } + avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + + gdigrab->frame_size = bmp.bmWidthBytes * bmp.bmHeight * bmp.bmPlanes; + gdigrab->header_size = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) + + (bpp <= 8 ? 
(1 << bpp) : 0) * sizeof(RGBQUAD) /* palette size */; + gdigrab->time_base = av_inv_q(gdigrab->framerate); + gdigrab->time_frame = av_gettime() / av_q2d(gdigrab->time_base); + + gdigrab->hwnd = hwnd; + gdigrab->source_hdc = source_hdc; + gdigrab->dest_hdc = dest_hdc; + gdigrab->hbmp = hbmp; + gdigrab->bmi = bmi; + gdigrab->buffer = buffer; + gdigrab->clip_rect = clip_rect; + + gdigrab->cursor_error_printed = 0; + + if (gdigrab->show_region) { + if (gdigrab_region_wnd_init(s1, gdigrab)) { + ret = AVERROR(EIO); + goto error; + } + } + + st->codec->codec_type = AVMEDIA_TYPE_VIDEO; + st->codec->codec_id = AV_CODEC_ID_BMP; + st->codec->time_base = gdigrab->time_base; + st->codec->bit_rate = (gdigrab->header_size + gdigrab->frame_size) * 1/av_q2d(gdigrab->time_base) * 8; + + return 0; + +error: + if (source_hdc) + ReleaseDC(hwnd, source_hdc); + if (dest_hdc) + DeleteDC(dest_hdc); + if (hbmp) + DeleteObject(hbmp); + if (source_hdc) + DeleteDC(source_hdc); + return ret; +} + +/** + * Paints a mouse pointer in a Win32 image. + * + * @param s1 Context of the log information + * @param s Current grad structure + */ +static void paint_mouse_pointer(AVFormatContext *s1, struct gdigrab *gdigrab) +{ + CURSORINFO ci = {0}; + +#define CURSOR_ERROR(str) \ + if (!gdigrab->cursor_error_printed) { \ + WIN32_API_ERROR(str); \ + gdigrab->cursor_error_printed = 1; \ + } + + ci.cbSize = sizeof(ci); + + if (GetCursorInfo(&ci)) { + HCURSOR icon = CopyCursor(ci.hCursor); + ICONINFO info; + POINT pos; + RECT clip_rect = gdigrab->clip_rect; + HWND hwnd = gdigrab->hwnd; + info.hbmMask = NULL; + info.hbmColor = NULL; + + if (ci.flags != CURSOR_SHOWING) + return; + + if (!icon) { + /* Use the standard arrow cursor as a fallback. + * You'll probably only hit this in Wine, which can't fetch + * the current system cursor. 
*/ + icon = CopyCursor(LoadCursor(NULL, IDC_ARROW)); + } + + if (!GetIconInfo(icon, &info)) { + CURSOR_ERROR("Could not get icon info"); + goto icon_error; + } + + pos.x = ci.ptScreenPos.x - clip_rect.left - info.xHotspot; + pos.y = ci.ptScreenPos.y - clip_rect.top - info.yHotspot; + + if (hwnd) { + RECT rect; + + if (GetWindowRect(hwnd, &rect)) { + pos.x -= rect.left; + pos.y -= rect.top; + } else { + CURSOR_ERROR("Couldn't get window rectangle"); + goto icon_error; + } + } + + av_log(s1, AV_LOG_DEBUG, "Cursor pos (%li,%li) -> (%li,%li)\n", + ci.ptScreenPos.x, ci.ptScreenPos.y, pos.x, pos.y); + + if (pos.x >= 0 && pos.x <= clip_rect.right - clip_rect.left && + pos.y >= 0 && pos.y <= clip_rect.bottom - clip_rect.top) { + if (!DrawIcon(gdigrab->dest_hdc, pos.x, pos.y, icon)) + CURSOR_ERROR("Couldn't draw icon"); + } + +icon_error: + if (info.hbmMask) + DeleteObject(info.hbmMask); + if (info.hbmColor) + DeleteObject(info.hbmColor); + if (icon) + DestroyCursor(icon); + } else { + CURSOR_ERROR("Couldn't get cursor info"); + } +} + +/** + * Grabs a frame from gdi (public device demuxer API). 
+ * + * @param s1 Context from avformat core + * @param pkt Packet holding the grabbed frame + * @return frame size in bytes + */ +static int gdigrab_read_packet(AVFormatContext *s1, AVPacket *pkt) +{ + struct gdigrab *gdigrab = s1->priv_data; + + HDC dest_hdc = gdigrab->dest_hdc; + HDC source_hdc = gdigrab->source_hdc; + RECT clip_rect = gdigrab->clip_rect; + AVRational time_base = gdigrab->time_base; + int64_t time_frame = gdigrab->time_frame; + + BITMAPFILEHEADER bfh; + int file_size = gdigrab->header_size + gdigrab->frame_size; + + int64_t curtime, delay; + + /* Calculate the time of the next frame */ + time_frame += INT64_C(1000000); + + /* Run Window message processing queue */ + if (gdigrab->show_region) + gdigrab_region_wnd_update(s1, gdigrab); + + /* wait based on the frame rate */ + for (;;) { + curtime = av_gettime(); + delay = time_frame * av_q2d(time_base) - curtime; + if (delay <= 0) { + if (delay < INT64_C(-1000000) * av_q2d(time_base)) { + time_frame += INT64_C(1000000); + } + break; + } + if (s1->flags & AVFMT_FLAG_NONBLOCK) { + return AVERROR(EAGAIN); + } else { + av_usleep(delay); + } + } + + if (av_new_packet(pkt, file_size) < 0) + return AVERROR(ENOMEM); + pkt->pts = curtime; + + /* Blit screen grab */ + if (!BitBlt(dest_hdc, 0, 0, + clip_rect.right - clip_rect.left, + clip_rect.bottom - clip_rect.top, + source_hdc, + clip_rect.left, clip_rect.top, SRCCOPY | CAPTUREBLT)) { + WIN32_API_ERROR("Failed to capture image"); + return AVERROR(EIO); + } + if (gdigrab->draw_mouse) + paint_mouse_pointer(s1, gdigrab); + + /* Copy bits to packet data */ + + bfh.bfType = 0x4d42; /* "BM" in little-endian */ + bfh.bfSize = file_size; + bfh.bfReserved1 = 0; + bfh.bfReserved2 = 0; + bfh.bfOffBits = gdigrab->header_size; + + memcpy(pkt->data, &bfh, sizeof(bfh)); + + memcpy(pkt->data + sizeof(bfh), &gdigrab->bmi.bmiHeader, sizeof(gdigrab->bmi.bmiHeader)); + + if (gdigrab->bmi.bmiHeader.biBitCount <= 8) + GetDIBColorTable(dest_hdc, 0, 1 << 
gdigrab->bmi.bmiHeader.biBitCount, + (RGBQUAD *) (pkt->data + sizeof(bfh) + sizeof(gdigrab->bmi.bmiHeader))); + + memcpy(pkt->data + gdigrab->header_size, gdigrab->buffer, gdigrab->frame_size); + + gdigrab->time_frame = time_frame; + + return gdigrab->header_size + gdigrab->frame_size; +} + +/** + * Closes gdi frame grabber (public device demuxer API). + * + * @param s1 Context from avformat core + * @return 0 success, !0 failure + */ +static int gdigrab_read_close(AVFormatContext *s1) +{ + struct gdigrab *s = s1->priv_data; + + if (s->show_region) + gdigrab_region_wnd_destroy(s1, s); + + if (s->source_hdc) + ReleaseDC(s->hwnd, s->source_hdc); + if (s->dest_hdc) + DeleteDC(s->dest_hdc); + if (s->hbmp) + DeleteObject(s->hbmp); + if (s->source_hdc) + DeleteDC(s->source_hdc); + + return 0; +} + +#define OFFSET(x) offsetof(struct gdigrab, x) +#define DEC AV_OPT_FLAG_DECODING_PARAM +static const AVOption options[] = { + { "draw_mouse", "draw the mouse pointer", OFFSET(draw_mouse), AV_OPT_TYPE_INT, {.i64 = 1}, 0, 1, DEC }, + { "show_region", "draw border around capture area", OFFSET(show_region), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 1, DEC }, + { "framerate", "set video frame rate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, 0, DEC }, + { "video_size", "set video frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC }, + { "offset_x", "capture area x offset", OFFSET(offset_x), AV_OPT_TYPE_INT, {.i64 = 0}, INT_MIN, INT_MAX, DEC }, + { "offset_y", "capture area y offset", OFFSET(offset_y), AV_OPT_TYPE_INT, {.i64 = 0}, INT_MIN, INT_MAX, DEC }, + { NULL }, +}; + +static const AVClass gdigrab_class = { + .class_name = "GDIgrab indev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, +}; + +/** gdi grabber device demuxer declaration */ +AVInputFormat ff_gdigrab_demuxer = { + .name = "gdigrab", + .long_name = NULL_IF_CONFIG_SMALL("GDI API Windows frame grabber"), + .priv_data_size = sizeof(struct 
gdigrab), + .read_header = gdigrab_read_header, + .read_packet = gdigrab_read_packet, + .read_close = gdigrab_read_close, + .flags = AVFMT_NOFILE, + .priv_class = &gdigrab_class, +}; diff --git a/libavdevice/iec61883.c b/libavdevice/iec61883.c new file mode 100644 index 0000000..6d5ec4a --- /dev/null +++ b/libavdevice/iec61883.c @@ -0,0 +1,498 @@ +/* + * Copyright (c) 2012 Georg Lippitsch <georg.lippitsch@gmx.at> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * libiec61883 interface + */ + +#include <sys/poll.h> +#include <libraw1394/raw1394.h> +#include <libavc1394/avc1394.h> +#include <libavc1394/rom1394.h> +#include <libiec61883/iec61883.h> +#include "libavformat/dv.h" +#include "libavformat/mpegts.h" +#include "libavutil/opt.h" +#include "avdevice.h" + +#define THREADS HAVE_PTHREADS + +#if THREADS +#include <pthread.h> +#endif + +#define MOTDCT_SPEC_ID 0x00005068 +#define IEC61883_AUTO 0 +#define IEC61883_DV 1 +#define IEC61883_HDV 2 + +/** + * For DV, one packet corresponds exactly to one frame. + * For HDV, these are MPEG2 transport stream packets. + * The queue is implemented as linked list. 
+ */ +typedef struct DVPacket { + uint8_t *buf; ///< actual buffer data + int len; ///< size of buffer allocated + struct DVPacket *next; ///< next DVPacket +} DVPacket; + +struct iec61883_data { + AVClass *class; + raw1394handle_t raw1394; ///< handle for libraw1394 + iec61883_dv_fb_t iec61883_dv; ///< handle for libiec61883 when used with DV + iec61883_mpeg2_t iec61883_mpeg2; ///< handle for libiec61883 when used with HDV + + DVDemuxContext *dv_demux; ///< generic DV muxing/demuxing context + MpegTSContext *mpeg_demux; ///< generic HDV muxing/demuxing context + + DVPacket *queue_first; ///< first element of packet queue + DVPacket *queue_last; ///< last element of packet queue + + char *device_guid; ///< to select one of multiple DV devices + + int packets; ///< Number of packets queued + int max_packets; ///< Max. number of packets in queue + + int bandwidth; ///< returned by libiec61883 + int channel; ///< returned by libiec61883 + int input_port; ///< returned by libiec61883 + int type; ///< Stream type, to distinguish DV/HDV + int node; ///< returned by libiec61883 + int output_port; ///< returned by libiec61883 + int thread_loop; ///< Condition for thread while-loop + int receiving; ///< True as soon data from device available + int receive_error; ///< Set in receive task in case of error + int eof; ///< True as soon as no more data available + + struct pollfd raw1394_poll; ///< to poll for new data from libraw1394 + + /** Parse function for DV/HDV differs, so this is set before packets arrive */ + int (*parse_queue)(struct iec61883_data *dv, AVPacket *pkt); + +#if THREADS + pthread_t receive_task_thread; + pthread_mutex_t mutex; + pthread_cond_t cond; +#endif +}; + +static int iec61883_callback(unsigned char *data, int length, + int complete, void *callback_data) +{ + struct iec61883_data *dv = callback_data; + DVPacket *packet; + int ret; + +#if THREADS + pthread_mutex_lock(&dv->mutex); +#endif + + if (dv->packets >= dv->max_packets) { + av_log(NULL, 
AV_LOG_ERROR, "DV packet queue overrun, dropping.\n"); + ret = 0; + goto exit; + } + + packet = av_mallocz(sizeof(*packet)); + if (!packet) { + ret = -1; + goto exit; + } + + packet->buf = av_malloc(length); + if (!packet->buf) { + ret = -1; + goto exit; + } + packet->len = length; + + memcpy(packet->buf, data, length); + + if (dv->queue_first) { + dv->queue_last->next = packet; + dv->queue_last = packet; + } else { + dv->queue_first = packet; + dv->queue_last = packet; + } + dv->packets++; + + ret = 0; + +exit: +#if THREADS + pthread_cond_broadcast(&dv->cond); + pthread_mutex_unlock(&dv->mutex); +#endif + return ret; +} + +static void *iec61883_receive_task(void *opaque) +{ + struct iec61883_data *dv = (struct iec61883_data *)opaque; + int result; + +#if THREADS + while (dv->thread_loop) +#endif + { + while ((result = poll(&dv->raw1394_poll, 1, 200)) < 0) { + if (!(errno == EAGAIN || errno == EINTR)) { + av_log(NULL, AV_LOG_ERROR, "Raw1394 poll error occurred.\n"); + dv->receive_error = AVERROR(EIO); + return NULL; + } + } + if (result > 0 && ((dv->raw1394_poll.revents & POLLIN) + || (dv->raw1394_poll.revents & POLLPRI))) { + dv->receiving = 1; + raw1394_loop_iterate(dv->raw1394); + } else if (dv->receiving) { + av_log(NULL, AV_LOG_ERROR, "No more input data available\n"); +#if THREADS + pthread_mutex_lock(&dv->mutex); + dv->eof = 1; + pthread_cond_broadcast(&dv->cond); + pthread_mutex_unlock(&dv->mutex); +#else + dv->eof = 1; +#endif + return NULL; + } + } + + return NULL; +} + +static int iec61883_parse_queue_dv(struct iec61883_data *dv, AVPacket *pkt) +{ + DVPacket *packet; + int size; + + size = avpriv_dv_get_packet(dv->dv_demux, pkt); + if (size > 0) + return size; + + packet = dv->queue_first; + if (!packet) + return -1; + + size = avpriv_dv_produce_packet(dv->dv_demux, pkt, + packet->buf, packet->len, -1); + pkt->destruct = av_destruct_packet; + dv->queue_first = packet->next; + av_free(packet); + dv->packets--; + + if (size > 0) + return size; + + return 
-1; +} + +static int iec61883_parse_queue_hdv(struct iec61883_data *dv, AVPacket *pkt) +{ + DVPacket *packet; + int size; + + while (dv->queue_first) { + packet = dv->queue_first; + size = avpriv_mpegts_parse_packet(dv->mpeg_demux, pkt, packet->buf, + packet->len); + dv->queue_first = packet->next; + av_free(packet->buf); + av_free(packet); + dv->packets--; + + if (size > 0) + return size; + } + + return -1; +} + +static int iec61883_read_header(AVFormatContext *context) +{ + struct iec61883_data *dv = context->priv_data; + struct raw1394_portinfo pinf[16]; + rom1394_directory rom_dir; + char *endptr; + int inport; + int nb_ports; + int port = -1; + int response; + int i, j = 0; + uint64_t guid = 0; + + dv->input_port = -1; + dv->output_port = -1; + dv->channel = -1; + + dv->raw1394 = raw1394_new_handle(); + + if (!dv->raw1394) { + av_log(context, AV_LOG_ERROR, "Failed to open IEEE1394 interface.\n"); + return AVERROR(EIO); + } + + if ((nb_ports = raw1394_get_port_info(dv->raw1394, pinf, 16)) < 0) { + av_log(context, AV_LOG_ERROR, "Failed to get number of IEEE1394 ports.\n"); + goto fail; + } + + inport = strtol(context->filename, &endptr, 10); + if (endptr != context->filename && *endptr == '\0') { + av_log(context, AV_LOG_INFO, "Selecting IEEE1394 port: %d\n", inport); + j = inport; + nb_ports = inport + 1; + } else if (strcmp(context->filename, "auto")) { + av_log(context, AV_LOG_ERROR, "Invalid input \"%s\", you should specify " + "\"auto\" for auto-detection, or the port number.\n", context->filename); + goto fail; + } + + if (dv->device_guid) { + if (sscanf(dv->device_guid, "%llx", (long long unsigned int *)&guid) != 1) { + av_log(context, AV_LOG_INFO, "Invalid dvguid parameter: %s\n", + dv->device_guid); + goto fail; + } + } + + for (; j < nb_ports && port==-1; ++j) { + raw1394_destroy_handle(dv->raw1394); + + if (!(dv->raw1394 = raw1394_new_handle_on_port(j))) { + av_log(context, AV_LOG_ERROR, "Failed setting IEEE1394 port.\n"); + goto fail; + } + + for 
(i=0; i<raw1394_get_nodecount(dv->raw1394); ++i) { + + /* Select device explicitly by GUID */ + + if (guid > 1) { + if (guid == rom1394_get_guid(dv->raw1394, i)) { + dv->node = i; + port = j; + break; + } + } else { + + /* Select first AV/C tape recorder player node */ + + if (rom1394_get_directory(dv->raw1394, i, &rom_dir) < 0) + continue; + if (((rom1394_get_node_type(&rom_dir) == ROM1394_NODE_TYPE_AVC) && + avc1394_check_subunit_type(dv->raw1394, i, AVC1394_SUBUNIT_TYPE_VCR)) || + (rom_dir.unit_spec_id == MOTDCT_SPEC_ID)) { + rom1394_free_directory(&rom_dir); + dv->node = i; + port = j; + break; + } + rom1394_free_directory(&rom_dir); + } + } + } + + if (port == -1) { + av_log(context, AV_LOG_ERROR, "No AV/C devices found.\n"); + goto fail; + } + + /* Provide bus sanity for multiple connections */ + + iec61883_cmp_normalize_output(dv->raw1394, 0xffc0 | dv->node); + + /* Find out if device is DV or HDV */ + + if (dv->type == IEC61883_AUTO) { + response = avc1394_transaction(dv->raw1394, dv->node, + AVC1394_CTYPE_STATUS | + AVC1394_SUBUNIT_TYPE_TAPE_RECORDER | + AVC1394_SUBUNIT_ID_0 | + AVC1394_VCR_COMMAND_OUTPUT_SIGNAL_MODE | + 0xFF, 2); + response = AVC1394_GET_OPERAND0(response); + dv->type = (response == 0x10 || response == 0x90 || response == 0x1A || response == 0x9A) ? 
+ IEC61883_HDV : IEC61883_DV; + } + + /* Connect to device, and do initialization */ + + dv->channel = iec61883_cmp_connect(dv->raw1394, dv->node, &dv->output_port, + raw1394_get_local_id(dv->raw1394), + &dv->input_port, &dv->bandwidth); + + if (dv->channel < 0) + dv->channel = 63; + + if (!dv->max_packets) + dv->max_packets = 100; + + if (dv->type == IEC61883_HDV) { + + /* Init HDV receive */ + + avformat_new_stream(context, NULL); + + dv->mpeg_demux = avpriv_mpegts_parse_open(context); + if (!dv->mpeg_demux) + goto fail; + + dv->parse_queue = iec61883_parse_queue_hdv; + + dv->iec61883_mpeg2 = iec61883_mpeg2_recv_init(dv->raw1394, + (iec61883_mpeg2_recv_t)iec61883_callback, + dv); + + dv->max_packets *= 766; + } else { + + /* Init DV receive */ + + dv->dv_demux = avpriv_dv_init_demux(context); + if (!dv->dv_demux) + goto fail; + + dv->parse_queue = iec61883_parse_queue_dv; + + dv->iec61883_dv = iec61883_dv_fb_init(dv->raw1394, iec61883_callback, dv); + } + + dv->raw1394_poll.fd = raw1394_get_fd(dv->raw1394); + dv->raw1394_poll.events = POLLIN | POLLERR | POLLHUP | POLLPRI; + + /* Actually start receiving */ + + if (dv->type == IEC61883_HDV) + iec61883_mpeg2_recv_start(dv->iec61883_mpeg2, dv->channel); + else + iec61883_dv_fb_start(dv->iec61883_dv, dv->channel); + +#if THREADS + dv->thread_loop = 1; + pthread_mutex_init(&dv->mutex, NULL); + pthread_cond_init(&dv->cond, NULL); + pthread_create(&dv->receive_task_thread, NULL, iec61883_receive_task, dv); +#endif + + return 0; + +fail: + raw1394_destroy_handle(dv->raw1394); + return AVERROR(EIO); +} + +static int iec61883_read_packet(AVFormatContext *context, AVPacket *pkt) +{ + struct iec61883_data *dv = context->priv_data; + int size; + + /** + * Try to parse frames from queue + */ + +#if THREADS + pthread_mutex_lock(&dv->mutex); + while ((size = dv->parse_queue(dv, pkt)) == -1) + if (!dv->eof) + pthread_cond_wait(&dv->cond, &dv->mutex); + else + break; + pthread_mutex_unlock(&dv->mutex); +#else + int result; + while 
((size = dv->parse_queue(dv, pkt)) == -1) { + iec61883_receive_task((void *)dv); + if (dv->receive_error) + return dv->receive_error; + } +#endif + + return size; +} + +static int iec61883_close(AVFormatContext *context) +{ + struct iec61883_data *dv = context->priv_data; + +#if THREADS + dv->thread_loop = 0; + pthread_join(dv->receive_task_thread, NULL); + pthread_cond_destroy(&dv->cond); + pthread_mutex_destroy(&dv->mutex); +#endif + + if (dv->type == IEC61883_HDV) { + iec61883_mpeg2_recv_stop(dv->iec61883_mpeg2); + iec61883_mpeg2_close(dv->iec61883_mpeg2); + avpriv_mpegts_parse_close(dv->mpeg_demux); + } else { + iec61883_dv_fb_stop(dv->iec61883_dv); + iec61883_dv_fb_close(dv->iec61883_dv); + } + while (dv->queue_first) { + DVPacket *packet = dv->queue_first; + dv->queue_first = packet->next; + av_free(packet->buf); + av_free(packet); + } + + iec61883_cmp_disconnect(dv->raw1394, dv->node, dv->output_port, + raw1394_get_local_id(dv->raw1394), + dv->input_port, dv->channel, dv->bandwidth); + + raw1394_destroy_handle(dv->raw1394); + + return 0; +} + +static const AVOption options[] = { + { "dvtype", "override autodetection of DV/HDV", offsetof(struct iec61883_data, type), AV_OPT_TYPE_INT, {.i64 = IEC61883_AUTO}, IEC61883_AUTO, IEC61883_HDV, AV_OPT_FLAG_DECODING_PARAM, "dvtype" }, + { "auto", "auto detect DV/HDV", 0, AV_OPT_TYPE_CONST, {.i64 = IEC61883_AUTO}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "dvtype" }, + { "dv", "force device being treated as DV device", 0, AV_OPT_TYPE_CONST, {.i64 = IEC61883_DV}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "dvtype" }, + { "hdv" , "force device being treated as HDV device", 0, AV_OPT_TYPE_CONST, {.i64 = IEC61883_HDV}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "dvtype" }, + { "dvbuffer", "set queue buffer size (in packets)", offsetof(struct iec61883_data, max_packets), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM }, + { "dvguid", "select one of multiple DV devices by its GUID", offsetof(struct iec61883_data, device_guid), 
AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM }, + { NULL }, +}; + +static const AVClass iec61883_class = { + .class_name = "iec61883 indev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, +}; + +AVInputFormat ff_iec61883_demuxer = { + .name = "iec61883", + .long_name = NULL_IF_CONFIG_SMALL("libiec61883 (new DV1394) A/V input device"), + .priv_data_size = sizeof(struct iec61883_data), + .read_header = iec61883_read_header, + .read_packet = iec61883_read_packet, + .read_close = iec61883_close, + .flags = AVFMT_NOFILE, + .priv_class = &iec61883_class, +}; diff --git a/libavdevice/jack_audio.c b/libavdevice/jack_audio.c index 8ce3321..2fda8ad 100644 --- a/libavdevice/jack_audio.c +++ b/libavdevice/jack_audio.c @@ -3,20 +3,20 @@ * Copyright (c) 2009 Samalyse * Author: Olivier Guilyardi <olivier samalyse com> * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -32,6 +32,7 @@ #include "libavformat/avformat.h" #include "libavformat/internal.h" #include "timefilter.h" +#include "avdevice.h" /** * Size of the internal FIFO buffers as a number of audio packets @@ -151,7 +152,6 @@ static int start_jack(AVFormatContext *context) JackData *self = context->priv_data; jack_status_t status; int i, test; - double o, period; /* Register as a JACK client, using the context filename as client name. */ self->client = jack_client_open(context->filename, JackNullOption, &status); @@ -163,7 +163,7 @@ static int start_jack(AVFormatContext *context) sem_init(&self->packet_count, 0, 0); self->sample_rate = jack_get_sample_rate(self->client); - self->ports = av_malloc(self->nports * sizeof(*self->ports)); + self->ports = av_malloc_array(self->nports, sizeof(*self->ports)); self->buffer_size = jack_get_buffer_size(self->client); /* Register JACK ports */ @@ -187,18 +187,16 @@ static int start_jack(AVFormatContext *context) jack_set_xrun_callback(self->client, xrun_callback, self); /* Create time filter */ - period = (double) self->buffer_size / self->sample_rate; - o = 2 * M_PI * 1.5 * period; /// bandwidth: 1.5Hz - self->timefilter = ff_timefilter_new (1.0 / self->sample_rate, sqrt(2 * o), o * o); + self->timefilter = ff_timefilter_new (1.0 / self->sample_rate, self->buffer_size, 1.5); if (!self->timefilter) { jack_client_close(self->client); return AVERROR(ENOMEM); } /* Create FIFO buffers */ - self->filled_pkts = av_fifo_alloc(FIFO_PACKETS_NUM * sizeof(AVPacket)); + self->filled_pkts = av_fifo_alloc_array(FIFO_PACKETS_NUM, sizeof(AVPacket)); /* New packets FIFO with one extra packet for safety against underruns */ - self->new_pkts = 
av_fifo_alloc((FIFO_PACKETS_NUM + 1) * sizeof(AVPacket)); + self->new_pkts = av_fifo_alloc_array((FIFO_PACKETS_NUM + 1), sizeof(AVPacket)); if ((test = supply_new_packets(self, context))) { jack_client_close(self->client); return test; @@ -208,14 +206,14 @@ static int start_jack(AVFormatContext *context) } -static void free_pkt_fifo(AVFifoBuffer *fifo) +static void free_pkt_fifo(AVFifoBuffer **fifo) { AVPacket pkt; - while (av_fifo_size(fifo)) { - av_fifo_generic_read(fifo, &pkt, sizeof(pkt), NULL); + while (av_fifo_size(*fifo)) { + av_fifo_generic_read(*fifo, &pkt, sizeof(pkt), NULL); av_free_packet(&pkt); } - av_fifo_free(fifo); + av_fifo_freep(fifo); } static void stop_jack(JackData *self) @@ -226,8 +224,8 @@ static void stop_jack(JackData *self) jack_client_close(self->client); } sem_destroy(&self->packet_count); - free_pkt_fifo(self->new_pkts); - free_pkt_fifo(self->filled_pkts); + free_pkt_fifo(&self->new_pkts); + free_pkt_fifo(&self->filled_pkts); av_freep(&self->ports); ff_timefilter_destroy(self->timefilter); } @@ -335,6 +333,7 @@ static const AVClass jack_indev_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, }; AVInputFormat ff_jack_demuxer = { diff --git a/libavdevice/lavfi.c b/libavdevice/lavfi.c new file mode 100644 index 0000000..1398ece --- /dev/null +++ b/libavdevice/lavfi.c @@ -0,0 +1,436 @@ +/* + * Copyright (c) 2011 Stefano Sabatini + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * libavfilter virtual input device + */ + +/* #define DEBUG */ + +#include <float.h> /* DBL_MIN, DBL_MAX */ + +#include "libavutil/bprint.h" +#include "libavutil/channel_layout.h" +#include "libavutil/file.h" +#include "libavutil/log.h" +#include "libavutil/mem.h" +#include "libavutil/opt.h" +#include "libavutil/parseutils.h" +#include "libavutil/pixdesc.h" +#include "libavfilter/avfilter.h" +#include "libavfilter/avfiltergraph.h" +#include "libavfilter/buffersink.h" +#include "libavformat/internal.h" +#include "avdevice.h" + +typedef struct { + AVClass *class; ///< class for private options + char *graph_str; + char *graph_filename; + char *dump_graph; + AVFilterGraph *graph; + AVFilterContext **sinks; + int *sink_stream_map; + int *sink_eof; + int *stream_sink_map; + AVFrame *decoded_frame; +} LavfiContext; + +static int *create_all_formats(int n) +{ + int i, j, *fmts, count = 0; + + for (i = 0; i < n; i++) { + const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(i); + if (!(desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) + count++; + } + + if (!(fmts = av_malloc((count+1) * sizeof(int)))) + return NULL; + for (j = 0, i = 0; i < n; i++) { + const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(i); + if (!(desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) + fmts[j++] = i; + } + fmts[j] = -1; + return fmts; +} + +av_cold static int lavfi_read_close(AVFormatContext *avctx) +{ + LavfiContext *lavfi = avctx->priv_data; + + av_freep(&lavfi->sink_stream_map); + av_freep(&lavfi->sink_eof); + av_freep(&lavfi->stream_sink_map); + av_freep(&lavfi->sinks); + avfilter_graph_free(&lavfi->graph); + av_frame_free(&lavfi->decoded_frame); + + return 0; +} + +av_cold static int 
lavfi_read_header(AVFormatContext *avctx) +{ + LavfiContext *lavfi = avctx->priv_data; + AVFilterInOut *input_links = NULL, *output_links = NULL, *inout; + AVFilter *buffersink, *abuffersink; + int *pix_fmts = create_all_formats(AV_PIX_FMT_NB); + enum AVMediaType type; + int ret = 0, i, n; + +#define FAIL(ERR) { ret = ERR; goto end; } + + if (!pix_fmts) + FAIL(AVERROR(ENOMEM)); + + avfilter_register_all(); + + buffersink = avfilter_get_by_name("buffersink"); + abuffersink = avfilter_get_by_name("abuffersink"); + + if (lavfi->graph_filename && lavfi->graph_str) { + av_log(avctx, AV_LOG_ERROR, + "Only one of the graph or graph_file options must be specified\n"); + FAIL(AVERROR(EINVAL)); + } + + if (lavfi->graph_filename) { + AVBPrint graph_file_pb; + AVIOContext *avio = NULL; + ret = avio_open(&avio, lavfi->graph_filename, AVIO_FLAG_READ); + if (ret < 0) + goto end; + av_bprint_init(&graph_file_pb, 0, AV_BPRINT_SIZE_UNLIMITED); + ret = avio_read_to_bprint(avio, &graph_file_pb, INT_MAX); + avio_close(avio); + av_bprint_chars(&graph_file_pb, '\0', 1); + if (!ret && !av_bprint_is_complete(&graph_file_pb)) + ret = AVERROR(ENOMEM); + if (ret) { + av_bprint_finalize(&graph_file_pb, NULL); + goto end; + } + if ((ret = av_bprint_finalize(&graph_file_pb, &lavfi->graph_str))) + goto end; + } + + if (!lavfi->graph_str) + lavfi->graph_str = av_strdup(avctx->filename); + + /* parse the graph, create a stream for each open output */ + if (!(lavfi->graph = avfilter_graph_alloc())) + FAIL(AVERROR(ENOMEM)); + + if ((ret = avfilter_graph_parse_ptr(lavfi->graph, lavfi->graph_str, + &input_links, &output_links, avctx)) < 0) + goto end; + + if (input_links) { + av_log(avctx, AV_LOG_ERROR, + "Open inputs in the filtergraph are not acceptable\n"); + FAIL(AVERROR(EINVAL)); + } + + /* count the outputs */ + for (n = 0, inout = output_links; inout; n++, inout = inout->next); + + if (!(lavfi->sink_stream_map = av_malloc(sizeof(int) * n))) + FAIL(AVERROR(ENOMEM)); + if (!(lavfi->sink_eof = 
av_mallocz(sizeof(int) * n))) + FAIL(AVERROR(ENOMEM)); + if (!(lavfi->stream_sink_map = av_malloc(sizeof(int) * n))) + FAIL(AVERROR(ENOMEM)); + + for (i = 0; i < n; i++) + lavfi->stream_sink_map[i] = -1; + + /* parse the output link names - they need to be of the form out0, out1, ... + * create a mapping between them and the streams */ + for (i = 0, inout = output_links; inout; i++, inout = inout->next) { + int stream_idx; + if (!strcmp(inout->name, "out")) + stream_idx = 0; + else if (sscanf(inout->name, "out%d\n", &stream_idx) != 1) { + av_log(avctx, AV_LOG_ERROR, + "Invalid outpad name '%s'\n", inout->name); + FAIL(AVERROR(EINVAL)); + } + + if ((unsigned)stream_idx >= n) { + av_log(avctx, AV_LOG_ERROR, + "Invalid index was specified in output '%s', " + "must be a non-negative value < %d\n", + inout->name, n); + FAIL(AVERROR(EINVAL)); + } + + if (lavfi->stream_sink_map[stream_idx] != -1) { + av_log(avctx, AV_LOG_ERROR, + "An output with stream index %d was already specified\n", + stream_idx); + FAIL(AVERROR(EINVAL)); + } + lavfi->sink_stream_map[i] = stream_idx; + lavfi->stream_sink_map[stream_idx] = i; + } + + /* for each open output create a corresponding stream */ + for (i = 0, inout = output_links; inout; i++, inout = inout->next) { + AVStream *st; + if (!(st = avformat_new_stream(avctx, NULL))) + FAIL(AVERROR(ENOMEM)); + st->id = i; + } + + /* create a sink for each output and connect them to the graph */ + lavfi->sinks = av_malloc_array(avctx->nb_streams, sizeof(AVFilterContext *)); + if (!lavfi->sinks) + FAIL(AVERROR(ENOMEM)); + + for (i = 0, inout = output_links; inout; i++, inout = inout->next) { + AVFilterContext *sink; + + type = inout->filter_ctx->output_pads[inout->pad_idx].type; + + if (type == AVMEDIA_TYPE_VIDEO && ! buffersink || + type == AVMEDIA_TYPE_AUDIO && ! 
abuffersink) { + av_log(avctx, AV_LOG_ERROR, "Missing required buffersink filter, aborting.\n"); + FAIL(AVERROR_FILTER_NOT_FOUND); + } + + if (type == AVMEDIA_TYPE_VIDEO) { + ret = avfilter_graph_create_filter(&sink, buffersink, + inout->name, NULL, + NULL, lavfi->graph); + if (ret >= 0) + ret = av_opt_set_int_list(sink, "pix_fmts", pix_fmts, AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN); + if (ret < 0) + goto end; + } else if (type == AVMEDIA_TYPE_AUDIO) { + enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_U8, + AV_SAMPLE_FMT_S16, + AV_SAMPLE_FMT_S32, + AV_SAMPLE_FMT_FLT, + AV_SAMPLE_FMT_DBL, -1 }; + + ret = avfilter_graph_create_filter(&sink, abuffersink, + inout->name, NULL, + NULL, lavfi->graph); + if (ret >= 0) + ret = av_opt_set_int_list(sink, "sample_fmts", sample_fmts, AV_SAMPLE_FMT_NONE, AV_OPT_SEARCH_CHILDREN); + if (ret < 0) + goto end; + ret = av_opt_set_int(sink, "all_channel_counts", 1, + AV_OPT_SEARCH_CHILDREN); + if (ret < 0) + goto end; + } else { + av_log(avctx, AV_LOG_ERROR, + "Output '%s' is not a video or audio output, not yet supported\n", inout->name); + FAIL(AVERROR(EINVAL)); + } + + lavfi->sinks[i] = sink; + if ((ret = avfilter_link(inout->filter_ctx, inout->pad_idx, sink, 0)) < 0) + goto end; + } + + /* configure the graph */ + if ((ret = avfilter_graph_config(lavfi->graph, avctx)) < 0) + goto end; + + if (lavfi->dump_graph) { + char *dump = avfilter_graph_dump(lavfi->graph, lavfi->dump_graph); + fputs(dump, stderr); + fflush(stderr); + av_free(dump); + } + + /* fill each stream with the information in the corresponding sink */ + for (i = 0; i < avctx->nb_streams; i++) { + AVFilterLink *link = lavfi->sinks[lavfi->stream_sink_map[i]]->inputs[0]; + AVStream *st = avctx->streams[i]; + st->codec->codec_type = link->type; + avpriv_set_pts_info(st, 64, link->time_base.num, link->time_base.den); + if (link->type == AVMEDIA_TYPE_VIDEO) { + st->codec->codec_id = AV_CODEC_ID_RAWVIDEO; + st->codec->pix_fmt = link->format; + st->codec->time_base = 
link->time_base; + st->codec->width = link->w; + st->codec->height = link->h; + st ->sample_aspect_ratio = + st->codec->sample_aspect_ratio = link->sample_aspect_ratio; + avctx->probesize = FFMAX(avctx->probesize, + link->w * link->h * + av_get_padded_bits_per_pixel(av_pix_fmt_desc_get(link->format)) * + 30); + } else if (link->type == AVMEDIA_TYPE_AUDIO) { + st->codec->codec_id = av_get_pcm_codec(link->format, -1); + st->codec->channels = avfilter_link_get_channels(link); + st->codec->sample_fmt = link->format; + st->codec->sample_rate = link->sample_rate; + st->codec->time_base = link->time_base; + st->codec->channel_layout = link->channel_layout; + if (st->codec->codec_id == AV_CODEC_ID_NONE) + av_log(avctx, AV_LOG_ERROR, + "Could not find PCM codec for sample format %s.\n", + av_get_sample_fmt_name(link->format)); + } + } + + if (!(lavfi->decoded_frame = av_frame_alloc())) + FAIL(AVERROR(ENOMEM)); + +end: + av_free(pix_fmts); + avfilter_inout_free(&input_links); + avfilter_inout_free(&output_links); + if (ret < 0) + lavfi_read_close(avctx); + return ret; +} + +static int lavfi_read_packet(AVFormatContext *avctx, AVPacket *pkt) +{ + LavfiContext *lavfi = avctx->priv_data; + double min_pts = DBL_MAX; + int stream_idx, min_pts_sink_idx = 0; + AVFrame *frame = lavfi->decoded_frame; + AVPicture pict; + AVDictionary *frame_metadata; + int ret, i; + int size = 0; + + /* iterate through all the graph sinks. 
Select the sink with the + * minimum PTS */ + for (i = 0; i < avctx->nb_streams; i++) { + AVRational tb = lavfi->sinks[i]->inputs[0]->time_base; + double d; + int ret; + + if (lavfi->sink_eof[i]) + continue; + + ret = av_buffersink_get_frame_flags(lavfi->sinks[i], frame, + AV_BUFFERSINK_FLAG_PEEK); + if (ret == AVERROR_EOF) { + av_dlog(avctx, "EOF sink_idx:%d\n", i); + lavfi->sink_eof[i] = 1; + continue; + } else if (ret < 0) + return ret; + d = av_rescale_q(frame->pts, tb, AV_TIME_BASE_Q); + av_dlog(avctx, "sink_idx:%d time:%f\n", i, d); + av_frame_unref(frame); + + if (d < min_pts) { + min_pts = d; + min_pts_sink_idx = i; + } + } + if (min_pts == DBL_MAX) + return AVERROR_EOF; + + av_dlog(avctx, "min_pts_sink_idx:%i\n", min_pts_sink_idx); + + av_buffersink_get_frame_flags(lavfi->sinks[min_pts_sink_idx], frame, 0); + stream_idx = lavfi->sink_stream_map[min_pts_sink_idx]; + + if (frame->width /* FIXME best way of testing a video */) { + size = avpicture_get_size(frame->format, frame->width, frame->height); + if ((ret = av_new_packet(pkt, size)) < 0) + return ret; + + memcpy(pict.data, frame->data, 4*sizeof(frame->data[0])); + memcpy(pict.linesize, frame->linesize, 4*sizeof(frame->linesize[0])); + + avpicture_layout(&pict, frame->format, frame->width, frame->height, + pkt->data, size); + } else if (av_frame_get_channels(frame) /* FIXME test audio */) { + size = frame->nb_samples * av_get_bytes_per_sample(frame->format) * + av_frame_get_channels(frame); + if ((ret = av_new_packet(pkt, size)) < 0) + return ret; + memcpy(pkt->data, frame->data[0], size); + } + + frame_metadata = av_frame_get_metadata(frame); + if (frame_metadata) { + uint8_t *metadata; + AVDictionaryEntry *e = NULL; + AVBPrint meta_buf; + + av_bprint_init(&meta_buf, 0, AV_BPRINT_SIZE_UNLIMITED); + while ((e = av_dict_get(frame_metadata, "", e, AV_DICT_IGNORE_SUFFIX))) { + av_bprintf(&meta_buf, "%s", e->key); + av_bprint_chars(&meta_buf, '\0', 1); + av_bprintf(&meta_buf, "%s", e->value); + 
av_bprint_chars(&meta_buf, '\0', 1); + } + if (!av_bprint_is_complete(&meta_buf) || + !(metadata = av_packet_new_side_data(pkt, AV_PKT_DATA_STRINGS_METADATA, + meta_buf.len))) { + av_bprint_finalize(&meta_buf, NULL); + return AVERROR(ENOMEM); + } + memcpy(metadata, meta_buf.str, meta_buf.len); + av_bprint_finalize(&meta_buf, NULL); + } + + pkt->stream_index = stream_idx; + pkt->pts = frame->pts; + pkt->pos = av_frame_get_pkt_pos(frame); + pkt->size = size; + av_frame_unref(frame); + return size; +} + +#define OFFSET(x) offsetof(LavfiContext, x) + +#define DEC AV_OPT_FLAG_DECODING_PARAM + +static const AVOption options[] = { + { "graph", "set libavfilter graph", OFFSET(graph_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, + { "graph_file","set libavfilter graph filename", OFFSET(graph_filename), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC}, + { "dumpgraph", "dump graph to stderr", OFFSET(dump_graph), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, + { NULL }, +}; + +static const AVClass lavfi_class = { + .class_name = "lavfi indev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_INPUT, +}; + +AVInputFormat ff_lavfi_demuxer = { + .name = "lavfi", + .long_name = NULL_IF_CONFIG_SMALL("Libavfilter virtual input device"), + .priv_data_size = sizeof(LavfiContext), + .read_header = lavfi_read_header, + .read_packet = lavfi_read_packet, + .read_close = lavfi_read_close, + .flags = AVFMT_NOFILE, + .priv_class = &lavfi_class, +}; diff --git a/libavdevice/libcdio.c b/libavdevice/libcdio.c index 06ddb4a..16a4b26 100644 --- a/libavdevice/libcdio.c +++ b/libavdevice/libcdio.c @@ -1,20 +1,20 @@ /* * Copyright (c) 2011 Anton Khirnov <anton@khirnov.net> * - * This file is part of Libav. + * This file is part of FFmpeg. 
* - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -41,7 +41,7 @@ #include "libavformat/internal.h" typedef struct CDIOContext { - AVClass *class; + const AVClass *class; cdrom_drive_t *drive; cdrom_paranoia_t *paranoia; int32_t last_sector; @@ -177,6 +177,7 @@ static const AVClass libcdio_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, }; AVInputFormat ff_libcdio_demuxer = { diff --git a/libavdevice/libdc1394.c b/libavdevice/libdc1394.c index f030e3e..e9bd4c9 100644 --- a/libavdevice/libdc1394.c +++ b/libavdevice/libdc1394.c @@ -3,20 +3,20 @@ * Copyright (c) 2004 Roman Shaposhnik * Copyright (c) 2008 Alessandro Sappia * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. 
* - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -112,6 +112,7 @@ static const AVClass libdc1394_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, }; diff --git a/libavdevice/openal-dec.c b/libavdevice/openal-dec.c new file mode 100644 index 0000000..4c4ba28 --- /dev/null +++ b/libavdevice/openal-dec.c @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2011 Jonathan Baldwin + * + * This file is part of FFmpeg. + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + * OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. 
+ */ + +/** + * @file + * OpenAL 1.1 capture device for libavdevice + **/ + +#include <AL/al.h> +#include <AL/alc.h> + +#include "libavutil/opt.h" +#include "libavutil/time.h" +#include "libavformat/internal.h" +#include "avdevice.h" + +typedef struct { + AVClass *class; + /** OpenAL capture device context. **/ + ALCdevice *device; + /** The number of channels in the captured audio. **/ + int channels; + /** The sample rate (in Hz) of the captured audio. **/ + int sample_rate; + /** The sample size (in bits) of the captured audio. **/ + int sample_size; + /** The OpenAL sample format of the captured audio. **/ + ALCenum sample_format; + /** The number of bytes between two consecutive samples of the same channel/component. **/ + ALCint sample_step; + /** If true, print a list of capture devices on this system and exit. **/ + int list_devices; +} al_data; + +typedef struct { + ALCenum al_fmt; + enum AVCodecID codec_id; + int channels; +} al_format_info; + +#define LOWEST_AL_FORMAT FFMIN(FFMIN(AL_FORMAT_MONO8,AL_FORMAT_MONO16),FFMIN(AL_FORMAT_STEREO8,AL_FORMAT_STEREO16)) + +/** + * Get information about an AL_FORMAT value. + * @param al_fmt the AL_FORMAT value to find information about. + * @return A pointer to a structure containing information about the AL_FORMAT value. + */ +static inline al_format_info* get_al_format_info(ALCenum al_fmt) +{ + static al_format_info info_table[] = { + [AL_FORMAT_MONO8-LOWEST_AL_FORMAT] = {AL_FORMAT_MONO8, AV_CODEC_ID_PCM_U8, 1}, + [AL_FORMAT_MONO16-LOWEST_AL_FORMAT] = {AL_FORMAT_MONO16, AV_NE (AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE), 1}, + [AL_FORMAT_STEREO8-LOWEST_AL_FORMAT] = {AL_FORMAT_STEREO8, AV_CODEC_ID_PCM_U8, 2}, + [AL_FORMAT_STEREO16-LOWEST_AL_FORMAT] = {AL_FORMAT_STEREO16, AV_NE (AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE), 2}, + }; + + return &info_table[al_fmt-LOWEST_AL_FORMAT]; +} + +/** + * Get the OpenAL error code, translated into an av/errno error code. + * @param device The ALC device to check for errors. 
+ * @param error_msg_ret A pointer to a char* in which to return the error message, or NULL if desired. + * @return The error code, or 0 if there is no error. + */ +static inline int al_get_error(ALCdevice *device, const char** error_msg_ret) +{ + ALCenum error = alcGetError(device); + if (error_msg_ret) + *error_msg_ret = (const char*) alcGetString(device, error); + switch (error) { + case ALC_NO_ERROR: + return 0; + case ALC_INVALID_DEVICE: + return AVERROR(ENODEV); + break; + case ALC_INVALID_CONTEXT: + case ALC_INVALID_ENUM: + case ALC_INVALID_VALUE: + return AVERROR(EINVAL); + break; + case ALC_OUT_OF_MEMORY: + return AVERROR(ENOMEM); + break; + default: + return AVERROR(EIO); + } +} + +/** + * Print out a list of OpenAL capture devices on this system. + */ +static inline void print_al_capture_devices(void *log_ctx) +{ + const char *devices; + + if (!(devices = alcGetString(NULL, ALC_CAPTURE_DEVICE_SPECIFIER))) + return; + + av_log(log_ctx, AV_LOG_INFO, "List of OpenAL capture devices on this system:\n"); + + for (; *devices != '\0'; devices += strlen(devices) + 1) + av_log(log_ctx, AV_LOG_INFO, " %s\n", devices); +} + +static int read_header(AVFormatContext *ctx) +{ + al_data *ad = ctx->priv_data; + static const ALCenum sample_formats[2][2] = { + { AL_FORMAT_MONO8, AL_FORMAT_STEREO8 }, + { AL_FORMAT_MONO16, AL_FORMAT_STEREO16 } + }; + int error = 0; + const char *error_msg; + AVStream *st = NULL; + AVCodecContext *codec = NULL; + + if (ad->list_devices) { + print_al_capture_devices(ctx); + return AVERROR_EXIT; + } + + ad->sample_format = sample_formats[ad->sample_size/8-1][ad->channels-1]; + + /* Open device for capture */ + ad->device = + alcCaptureOpenDevice(ctx->filename[0] ? 
ctx->filename : NULL, + ad->sample_rate, + ad->sample_format, + ad->sample_rate); /* Maximum 1 second of sample data to be read at once */ + + if (error = al_get_error(ad->device, &error_msg)) goto fail; + + /* Create stream */ + if (!(st = avformat_new_stream(ctx, NULL))) { + error = AVERROR(ENOMEM); + goto fail; + } + + /* We work in microseconds */ + avpriv_set_pts_info(st, 64, 1, 1000000); + + /* Set codec parameters */ + codec = st->codec; + codec->codec_type = AVMEDIA_TYPE_AUDIO; + codec->sample_rate = ad->sample_rate; + codec->channels = get_al_format_info(ad->sample_format)->channels; + codec->codec_id = get_al_format_info(ad->sample_format)->codec_id; + + /* This is needed to read the audio data */ + ad->sample_step = (av_get_bits_per_sample(get_al_format_info(ad->sample_format)->codec_id) * + get_al_format_info(ad->sample_format)->channels) / 8; + + /* Finally, start the capture process */ + alcCaptureStart(ad->device); + + return 0; + +fail: + /* Handle failure */ + if (ad->device) + alcCaptureCloseDevice(ad->device); + if (error_msg) + av_log(ctx, AV_LOG_ERROR, "Cannot open device: %s\n", error_msg); + return error; +} + +static int read_packet(AVFormatContext* ctx, AVPacket *pkt) +{ + al_data *ad = ctx->priv_data; + int error=0; + const char *error_msg; + ALCint nb_samples; + + /* Get number of samples available */ + alcGetIntegerv(ad->device, ALC_CAPTURE_SAMPLES, (ALCsizei) sizeof(ALCint), &nb_samples); + if (error = al_get_error(ad->device, &error_msg)) goto fail; + + /* Create a packet of appropriate size */ + av_new_packet(pkt, nb_samples*ad->sample_step); + pkt->pts = av_gettime(); + + /* Fill the packet with the available samples */ + alcCaptureSamples(ad->device, pkt->data, nb_samples); + if (error = al_get_error(ad->device, &error_msg)) goto fail; + + return pkt->size; +fail: + /* Handle failure */ + if (pkt->data) + av_destruct_packet(pkt); + if (error_msg) + av_log(ctx, AV_LOG_ERROR, "Error: %s\n", error_msg); + return error; +} + +static int 
read_close(AVFormatContext* ctx) +{ + al_data *ad = ctx->priv_data; + + if (ad->device) { + alcCaptureStop(ad->device); + alcCaptureCloseDevice(ad->device); + } + return 0; +} + +#define OFFSET(x) offsetof(al_data, x) + +static const AVOption options[] = { + {"channels", "set number of channels", OFFSET(channels), AV_OPT_TYPE_INT, {.i64=2}, 1, 2, AV_OPT_FLAG_DECODING_PARAM }, + {"sample_rate", "set sample rate", OFFSET(sample_rate), AV_OPT_TYPE_INT, {.i64=44100}, 1, 192000, AV_OPT_FLAG_DECODING_PARAM }, + {"sample_size", "set sample size", OFFSET(sample_size), AV_OPT_TYPE_INT, {.i64=16}, 8, 16, AV_OPT_FLAG_DECODING_PARAM }, + {"list_devices", "list available devices", OFFSET(list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + {"true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + {"false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + {NULL}, +}; + +static const AVClass class = { + .class_name = "openal", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, +}; + +AVInputFormat ff_openal_demuxer = { + .name = "openal", + .long_name = NULL_IF_CONFIG_SMALL("OpenAL audio capture device"), + .priv_data_size = sizeof(al_data), + .read_probe = NULL, + .read_header = read_header, + .read_packet = read_packet, + .read_close = read_close, + .flags = AVFMT_NOFILE, + .priv_class = &class +}; diff --git a/libavdevice/opengl_enc.c b/libavdevice/opengl_enc.c new file mode 100644 index 0000000..5f5b800 --- /dev/null +++ b/libavdevice/opengl_enc.c @@ -0,0 +1,1306 @@ +/* + * Copyright (c) 2014 Lukasz Marek + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +//TODO: support for more formats +//TODO: support for more systems. +//TODO: implement X11, Windows, Mac OS native default window. SDL 1.2 doesn't allow to render to custom thread. + +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <unistd.h> +#include <stddef.h> + +#include "config.h" + +#if HAVE_WINDOWS_H +#include <windows.h> +#endif +#if HAVE_OPENGL_GL3_H +#include <OpenGL/gl3.h> +#elif HAVE_ES2_GL_H +#include <ES2/gl.h> +#else +#include <GL/gl.h> +#include <GL/glext.h> +#endif +#if HAVE_GLXGETPROCADDRESS +#include <GL/glx.h> +#endif + +#if HAVE_SDL +#include <SDL.h> +#endif + +#include "libavutil/common.h" +#include "libavutil/pixdesc.h" +#include "libavutil/log.h" +#include "libavutil/opt.h" +#include "libavutil/avassert.h" +#include "libavutil/avstring.h" +#include "libavformat/avformat.h" +#include "libavformat/internal.h" +#include "libavdevice/avdevice.h" +#include "opengl_enc_shaders.h" + +#ifndef APIENTRY +#define APIENTRY +#endif + +/* FF_GL_RED_COMPONENT is used for plannar pixel types. + * Only red component is sampled in shaders. + * On some platforms GL_RED is not available and GL_LUMINANCE have to be used, + * but since OpenGL 3.0 GL_LUMINANCE is deprecated. 
+ * GL_RED produces RGBA = value, 0, 0, 1. + * GL_LUMINANCE produces RGBA = value, value, value, 1. + * Note: GL_INTENSITY may also be used which produce RGBA = value, value, value, value. */ +#if defined(GL_RED) +#define FF_GL_RED_COMPONENT GL_RED +#elif defined(GL_LUMINANCE) +#define FF_GL_RED_COMPONENT GL_LUMINANCE +#else +#define FF_GL_RED_COMPONENT 0x1903; //GL_RED +#endif + +/* Constants not defined for iOS */ +#define FF_GL_UNSIGNED_BYTE_3_3_2 0x8032 +#define FF_GL_UNSIGNED_BYTE_2_3_3_REV 0x8362 +#define FF_GL_UNSIGNED_SHORT_1_5_5_5_REV 0x8366 +#define FF_GL_UNPACK_ROW_LENGTH 0x0CF2 + +/* MinGW exposes only OpenGL 1.1 API */ +#define FF_GL_ARRAY_BUFFER 0x8892 +#define FF_GL_ELEMENT_ARRAY_BUFFER 0x8893 +#define FF_GL_STATIC_DRAW 0x88E4 +#define FF_GL_FRAGMENT_SHADER 0x8B30 +#define FF_GL_VERTEX_SHADER 0x8B31 +#define FF_GL_COMPILE_STATUS 0x8B81 +#define FF_GL_LINK_STATUS 0x8B82 +#define FF_GL_INFO_LOG_LENGTH 0x8B84 +typedef void (APIENTRY *FF_PFNGLACTIVETEXTUREPROC) (GLenum texture); +typedef void (APIENTRY *FF_PFNGLGENBUFFERSPROC) (GLsizei n, GLuint *buffers); +typedef void (APIENTRY *FF_PFNGLDELETEBUFFERSPROC) (GLsizei n, const GLuint *buffers); +typedef void (APIENTRY *FF_PFNGLBUFFERDATAPROC) (GLenum target, ptrdiff_t size, const GLvoid *data, GLenum usage); +typedef void (APIENTRY *FF_PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer); +typedef GLint (APIENTRY *FF_PFNGLGETATTRIBLOCATIONPROC) (GLuint program, const char *name); +typedef void (APIENTRY *FF_PFNGLENABLEVERTEXATTRIBARRAYPROC) (GLuint index); +typedef void (APIENTRY *FF_PFNGLVERTEXATTRIBPOINTERPROC) (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, uintptr_t pointer); +typedef GLint (APIENTRY *FF_PFNGLGETUNIFORMLOCATIONPROC) (GLuint program, const char *name); +typedef void (APIENTRY *FF_PFNGLUNIFORM1FPROC) (GLint location, GLfloat v0); +typedef void (APIENTRY *FF_PFNGLUNIFORM1IPROC) (GLint location, GLint v0); +typedef void (APIENTRY *FF_PFNGLUNIFORMMATRIX4FVPROC) 
(GLint location, GLsizei count, GLboolean transpose, const GLfloat *value); +typedef GLuint (APIENTRY *FF_PFNGLCREATEPROGRAMPROC) (void); +typedef void (APIENTRY *FF_PFNGLDELETEPROGRAMPROC) (GLuint program); +typedef void (APIENTRY *FF_PFNGLUSEPROGRAMPROC) (GLuint program); +typedef void (APIENTRY *FF_PFNGLLINKPROGRAMPROC) (GLuint program); +typedef void (APIENTRY *FF_PFNGLGETPROGRAMIVPROC) (GLuint program, GLenum pname, GLint *params); +typedef void (APIENTRY *FF_PFNGLGETPROGRAMINFOLOGPROC) (GLuint program, GLsizei bufSize, GLsizei *length, char *infoLog); +typedef void (APIENTRY *FF_PFNGLATTACHSHADERPROC) (GLuint program, GLuint shader); +typedef GLuint (APIENTRY *FF_PFNGLCREATESHADERPROC) (GLenum type); +typedef void (APIENTRY *FF_PFNGLDELETESHADERPROC) (GLuint shader); +typedef void (APIENTRY *FF_PFNGLCOMPILESHADERPROC) (GLuint shader); +typedef void (APIENTRY *FF_PFNGLSHADERSOURCEPROC) (GLuint shader, GLsizei count, const char* *string, const GLint *length); +typedef void (APIENTRY *FF_PFNGLGETSHADERIVPROC) (GLuint shader, GLenum pname, GLint *params); +typedef void (APIENTRY *FF_PFNGLGETSHADERINFOLOGPROC) (GLuint shader, GLsizei bufSize, GLsizei *length, char *infoLog); + +typedef struct FFOpenGLFunctions { + FF_PFNGLACTIVETEXTUREPROC glActiveTexture; //Require GL ARB multitexture + FF_PFNGLGENBUFFERSPROC glGenBuffers; //Require GL_ARB_vertex_buffer_object + FF_PFNGLDELETEBUFFERSPROC glDeleteBuffers; //Require GL_ARB_vertex_buffer_object + FF_PFNGLBUFFERDATAPROC glBufferData; //Require GL_ARB_vertex_buffer_object + FF_PFNGLBINDBUFFERPROC glBindBuffer; //Require GL_ARB_vertex_buffer_object + FF_PFNGLGETATTRIBLOCATIONPROC glGetAttribLocation; //Require GL_ARB_vertex_shader + FF_PFNGLENABLEVERTEXATTRIBARRAYPROC glEnableVertexAttribArray; //Require GL_ARB_vertex_shader + FF_PFNGLVERTEXATTRIBPOINTERPROC glVertexAttribPointer; //Require GL_ARB_vertex_shader + FF_PFNGLGETUNIFORMLOCATIONPROC glGetUniformLocation; //Require GL_ARB_shader_objects + 
FF_PFNGLUNIFORM1FPROC glUniform1f; //Require GL_ARB_shader_objects + FF_PFNGLUNIFORM1IPROC glUniform1i; //Require GL_ARB_shader_objects + FF_PFNGLUNIFORMMATRIX4FVPROC glUniformMatrix4fv; //Require GL_ARB_shader_objects + FF_PFNGLCREATEPROGRAMPROC glCreateProgram; //Require GL_ARB_shader_objects + FF_PFNGLDELETEPROGRAMPROC glDeleteProgram; //Require GL_ARB_shader_objects + FF_PFNGLUSEPROGRAMPROC glUseProgram; //Require GL_ARB_shader_objects + FF_PFNGLLINKPROGRAMPROC glLinkProgram; //Require GL_ARB_shader_objects + FF_PFNGLGETPROGRAMIVPROC glGetProgramiv; //Require GL_ARB_shader_objects + FF_PFNGLGETPROGRAMINFOLOGPROC glGetProgramInfoLog; //Require GL_ARB_shader_objects + FF_PFNGLATTACHSHADERPROC glAttachShader; //Require GL_ARB_shader_objects + FF_PFNGLCREATESHADERPROC glCreateShader; //Require GL_ARB_shader_objects + FF_PFNGLDELETESHADERPROC glDeleteShader; //Require GL_ARB_shader_objects + FF_PFNGLCOMPILESHADERPROC glCompileShader; //Require GL_ARB_shader_objects + FF_PFNGLSHADERSOURCEPROC glShaderSource; //Require GL_ARB_shader_objects + FF_PFNGLGETSHADERIVPROC glGetShaderiv; //Require GL_ARB_shader_objects + FF_PFNGLGETSHADERINFOLOGPROC glGetShaderInfoLog; //Require GL_ARB_shader_objects +} FFOpenGLFunctions; + +#define OPENGL_ERROR_CHECK(ctx) \ +{\ + GLenum err_code; \ + if ((err_code = glGetError()) != GL_NO_ERROR) { \ + av_log(ctx, AV_LOG_ERROR, "OpenGL error occurred in '%s', line %d: %d\n", __FUNCTION__, __LINE__, err_code); \ + goto fail; \ + } \ +}\ + +typedef struct OpenGLVertexInfo +{ + float x, y, z; ///<Position + float s0, t0; ///<Texture coords +} OpenGLVertexInfo; + +/* defines 2 triangles to display */ +static const GLushort g_index[6] = +{ + 0, 1, 2, + 0, 3, 2, +}; + +typedef struct OpenGLContext { + AVClass *class; ///< class for private options + +#if HAVE_SDL + SDL_Surface *surface; +#endif + FFOpenGLFunctions glprocs; + + int inited; ///< Set to 1 when write_header was successfully called. 
+ uint8_t background[4]; ///< Background color + int no_window; ///< 0 for create default window + char *window_title; ///< Title of the window + + /* OpenGL implementation limits */ + GLint max_texture_size; ///< Maximum texture size + GLint max_viewport_width; ///< Maximum viewport size + GLint max_viewport_height; ///< Maximum viewport size + int non_pow_2_textures; ///< 1 when non power of 2 textures are supported + int unpack_subimage; ///< 1 when GL_EXT_unpack_subimage is available + + /* Current OpenGL configuration */ + GLuint program; ///< Shader program + GLuint vertex_shader; ///< Vertex shader + GLuint fragment_shader; ///< Fragment shader for current pix_pmt + GLuint texture_name[4]; ///< Textures' IDs + GLuint index_buffer; ///< Index buffer + GLuint vertex_buffer; ///< Vertex buffer + OpenGLVertexInfo vertex[4]; ///< VBO + GLint projection_matrix_location; ///< Uniforms' locations + GLint model_view_matrix_location; + GLint color_map_location; + GLint chroma_div_w_location; + GLint chroma_div_h_location; + GLint texture_location[4]; + GLint position_attrib; ///< Attibutes' locations + GLint texture_coords_attrib; + + GLfloat projection_matrix[16]; ///< Projection matrix + GLfloat model_view_matrix[16]; ///< Modev view matrix + GLfloat color_map[16]; ///< RGBA color map matrix + GLfloat chroma_div_w; ///< Chroma subsampling w ratio + GLfloat chroma_div_h; ///< Chroma subsampling h ratio + + /* Stream information */ + GLenum format; + GLenum type; + int width; ///< Stream width + int height; ///< Stream height + enum AVPixelFormat pix_fmt; ///< Stream pixel format + int picture_width; ///< Rendered width + int picture_height; ///< Rendered height + int window_width; + int window_height; +} OpenGLContext; + +static const struct OpenGLFormatDesc { + enum AVPixelFormat fixel_format; + const char * const * fragment_shader; + GLenum format; + GLenum type; +} opengl_format_desc[] = { + { AV_PIX_FMT_YUV420P, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, 
FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUV444P, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUV422P, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUV410P, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUV411P, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUV440P, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUV420P16, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_YUV422P16, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_YUV444P16, &FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_YUVA420P, &FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUVA444P, &FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUVA422P, &FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_YUVA420P16, &FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_YUVA422P16, &FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_YUVA444P16, &FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_RGB24, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_BGR24, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_0RGB, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_RGB0, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_0BGR, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_BGR0, 
&FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_RGB565, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, GL_UNSIGNED_SHORT_5_6_5 }, + { AV_PIX_FMT_BGR565, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, GL_UNSIGNED_SHORT_5_6_5 }, + { AV_PIX_FMT_RGB555, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGBA, FF_GL_UNSIGNED_SHORT_1_5_5_5_REV }, + { AV_PIX_FMT_BGR555, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGBA, FF_GL_UNSIGNED_SHORT_1_5_5_5_REV }, + { AV_PIX_FMT_RGB8, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, FF_GL_UNSIGNED_BYTE_3_3_2 }, + { AV_PIX_FMT_BGR8, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, FF_GL_UNSIGNED_BYTE_2_3_3_REV }, + { AV_PIX_FMT_RGB48, &FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET, GL_RGB, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_ARGB, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_RGBA, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_ABGR, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_BGRA, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET, GL_RGBA, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_RGBA64, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET, GL_RGBA, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_BGRA64, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET, GL_RGBA, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_GBRP, &FF_OPENGL_FRAGMENT_SHADER_RGB_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_GBRP16, &FF_OPENGL_FRAGMENT_SHADER_RGB_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_GBRAP, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_GBRAP16, &FF_OPENGL_FRAGMENT_SHADER_RGBA_PLANAR, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_GRAY8, &FF_OPENGL_FRAGMENT_SHADER_GRAY, FF_GL_RED_COMPONENT, GL_UNSIGNED_BYTE }, + { AV_PIX_FMT_GRAY16, &FF_OPENGL_FRAGMENT_SHADER_GRAY, FF_GL_RED_COMPONENT, GL_UNSIGNED_SHORT }, + { AV_PIX_FMT_NONE, NULL } +}; + +static av_cold int 
opengl_prepare_vertex(AVFormatContext *s); +static int opengl_draw(AVFormatContext *h, void *intput, int repaint, int is_pkt); +static av_cold int opengl_init_context(OpenGLContext *opengl); + +static av_cold void opengl_deinit_context(OpenGLContext *opengl) +{ + glDeleteTextures(4, opengl->texture_name); + opengl->texture_name[0] = opengl->texture_name[1] = + opengl->texture_name[2] = opengl->texture_name[3] = 0; + if (opengl->glprocs.glUseProgram) + opengl->glprocs.glUseProgram(0); + if (opengl->glprocs.glDeleteProgram) { + opengl->glprocs.glDeleteProgram(opengl->program); + opengl->program = 0; + } + if (opengl->glprocs.glDeleteShader) { + opengl->glprocs.glDeleteShader(opengl->vertex_shader); + opengl->glprocs.glDeleteShader(opengl->fragment_shader); + opengl->vertex_shader = opengl->fragment_shader = 0; + } + if (opengl->glprocs.glBindBuffer) { + opengl->glprocs.glBindBuffer(FF_GL_ARRAY_BUFFER, 0); + opengl->glprocs.glBindBuffer(FF_GL_ELEMENT_ARRAY_BUFFER, 0); + } + if (opengl->glprocs.glDeleteBuffers) { + opengl->glprocs.glDeleteBuffers(2, &opengl->index_buffer); + opengl->vertex_buffer = opengl->index_buffer = 0; + } +} + +static int opengl_resize(AVFormatContext *h, int width, int height) +{ + int ret = 0; + OpenGLContext *opengl = h->priv_data; + opengl->window_width = width; + opengl->window_height = height; + if (opengl->inited) { + if (opengl->no_window && + (ret = avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER, NULL , 0)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to prepare window buffer.\n"); + goto end; + } + if ((ret = opengl_prepare_vertex(h)) < 0) + goto end; + ret = opengl_draw(h, NULL, 1, 0); + } + end: + return ret; +} + +static int opengl_control_message(AVFormatContext *h, int type, void *data, size_t data_size) +{ + OpenGLContext *opengl = h->priv_data; + switch(type) { + case AV_APP_TO_DEV_WINDOW_SIZE: + if (data) { + AVDeviceRect *message = data; + return opengl_resize(h, message->width, 
message->height); + } + return AVERROR(EINVAL); + case AV_APP_TO_DEV_WINDOW_REPAINT: + return opengl_resize(h, opengl->window_width, opengl->window_height); + } + return AVERROR(ENOSYS); +} + +#if HAVE_SDL +static int opengl_sdl_recreate_window(OpenGLContext *opengl, int width, int height) +{ + opengl->surface = SDL_SetVideoMode(width, height, + 32, SDL_OPENGL | SDL_RESIZABLE); + if (!opengl->surface) { + av_log(opengl, AV_LOG_ERROR, "Unable to set video mode: %s\n", SDL_GetError()); + return AVERROR_EXTERNAL; + } + SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8); + SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8); + SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8); + SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8); + SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); + return 0; +} + +static int opengl_sdl_process_events(AVFormatContext *h) +{ + int ret; + OpenGLContext *opengl = h->priv_data; + SDL_Event event; + SDL_PumpEvents(); + while (SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_ALLEVENTS) > 0) { + switch (event.type) { + case SDL_QUIT: + return AVERROR(EIO); + case SDL_KEYDOWN: + switch (event.key.keysym.sym) { + case SDLK_ESCAPE: + case SDLK_q: + return AVERROR(EIO); + } + return 0; + case SDL_VIDEORESIZE: { + char buffer[100]; + int reinit; + AVDeviceRect message; + /* clean up old context because SDL_SetVideoMode may lose its state. 
*/ + SDL_VideoDriverName(buffer, sizeof(buffer)); + reinit = !av_strncasecmp(buffer, "quartz", sizeof(buffer)); + if (reinit) { + opengl_deinit_context(opengl); + } + if ((ret = opengl_sdl_recreate_window(opengl, event.resize.w, event.resize.h)) < 0) + return ret; + if (reinit && (ret = opengl_init_context(opengl)) < 0) + return ret; + message.width = opengl->surface->w; + message.height = opengl->surface->h; + return opengl_control_message(h, AV_APP_TO_DEV_WINDOW_SIZE, &message, sizeof(AVDeviceRect)); + } + } + } + return 0; +} + +static int av_cold opengl_sdl_create_window(AVFormatContext *h) +{ + int ret; + char buffer[100]; + OpenGLContext *opengl = h->priv_data; + AVDeviceRect message; + if (SDL_Init(SDL_INIT_VIDEO)) { + av_log(opengl, AV_LOG_ERROR, "Unable to initialize SDL: %s\n", SDL_GetError()); + return AVERROR_EXTERNAL; + } + if ((ret = opengl_sdl_recreate_window(opengl, opengl->window_width, + opengl->window_height)) < 0) + return ret; + av_log(opengl, AV_LOG_INFO, "SDL driver: '%s'.\n", SDL_VideoDriverName(buffer, sizeof(buffer))); + message.width = opengl->surface->w; + message.height = opengl->surface->h; + SDL_WM_SetCaption(opengl->window_title, NULL); + opengl_control_message(h, AV_APP_TO_DEV_WINDOW_SIZE, &message, sizeof(AVDeviceRect)); + return 0; +} + +static int av_cold opengl_sdl_load_procedures(OpenGLContext *opengl) +{ + FFOpenGLFunctions *procs = &opengl->glprocs; + +#define LOAD_OPENGL_FUN(name, type) \ + procs->name = (type)SDL_GL_GetProcAddress(#name); \ + if (!procs->name) { \ + av_log(opengl, AV_LOG_ERROR, "Cannot load OpenGL function: '%s'\n", #name); \ + return AVERROR(ENOSYS); \ + } + + LOAD_OPENGL_FUN(glActiveTexture, FF_PFNGLACTIVETEXTUREPROC) + LOAD_OPENGL_FUN(glGenBuffers, FF_PFNGLGENBUFFERSPROC) + LOAD_OPENGL_FUN(glDeleteBuffers, FF_PFNGLDELETEBUFFERSPROC) + LOAD_OPENGL_FUN(glBufferData, FF_PFNGLBUFFERDATAPROC) + LOAD_OPENGL_FUN(glBindBuffer, FF_PFNGLBINDBUFFERPROC) + LOAD_OPENGL_FUN(glGetAttribLocation, 
FF_PFNGLGETATTRIBLOCATIONPROC) + LOAD_OPENGL_FUN(glGetUniformLocation, FF_PFNGLGETUNIFORMLOCATIONPROC) + LOAD_OPENGL_FUN(glUniform1f, FF_PFNGLUNIFORM1FPROC) + LOAD_OPENGL_FUN(glUniform1i, FF_PFNGLUNIFORM1IPROC) + LOAD_OPENGL_FUN(glUniformMatrix4fv, FF_PFNGLUNIFORMMATRIX4FVPROC) + LOAD_OPENGL_FUN(glCreateProgram, FF_PFNGLCREATEPROGRAMPROC) + LOAD_OPENGL_FUN(glDeleteProgram, FF_PFNGLDELETEPROGRAMPROC) + LOAD_OPENGL_FUN(glUseProgram, FF_PFNGLUSEPROGRAMPROC) + LOAD_OPENGL_FUN(glLinkProgram, FF_PFNGLLINKPROGRAMPROC) + LOAD_OPENGL_FUN(glGetProgramiv, FF_PFNGLGETPROGRAMIVPROC) + LOAD_OPENGL_FUN(glGetProgramInfoLog, FF_PFNGLGETPROGRAMINFOLOGPROC) + LOAD_OPENGL_FUN(glAttachShader, FF_PFNGLATTACHSHADERPROC) + LOAD_OPENGL_FUN(glCreateShader, FF_PFNGLCREATESHADERPROC) + LOAD_OPENGL_FUN(glDeleteShader, FF_PFNGLDELETESHADERPROC) + LOAD_OPENGL_FUN(glCompileShader, FF_PFNGLCOMPILESHADERPROC) + LOAD_OPENGL_FUN(glShaderSource, FF_PFNGLSHADERSOURCEPROC) + LOAD_OPENGL_FUN(glGetShaderiv, FF_PFNGLGETSHADERIVPROC) + LOAD_OPENGL_FUN(glGetShaderInfoLog, FF_PFNGLGETSHADERINFOLOGPROC) + LOAD_OPENGL_FUN(glEnableVertexAttribArray, FF_PFNGLENABLEVERTEXATTRIBARRAYPROC) + LOAD_OPENGL_FUN(glVertexAttribPointer, FF_PFNGLVERTEXATTRIBPOINTERPROC) + + return 0; + +#undef LOAD_OPENGL_FUN +} +#endif /* HAVE_SDL */ + +#if defined(__APPLE__) +static int av_cold opengl_load_procedures(OpenGLContext *opengl) +{ + FFOpenGLFunctions *procs = &opengl->glprocs; + +#if HAVE_SDL + if (!opengl->no_window) + return opengl_sdl_load_procedures(opengl); +#endif + + procs->glActiveTexture = glActiveTexture; + procs->glGenBuffers = glGenBuffers; + procs->glDeleteBuffers = glDeleteBuffers; + procs->glBufferData = glBufferData; + procs->glBindBuffer = glBindBuffer; + procs->glGetAttribLocation = glGetAttribLocation; + procs->glGetUniformLocation = glGetUniformLocation; + procs->glUniform1f = glUniform1f; + procs->glUniform1i = glUniform1i; + procs->glUniformMatrix4fv = glUniformMatrix4fv; + procs->glCreateProgram = 
glCreateProgram; + procs->glDeleteProgram = glDeleteProgram; + procs->glUseProgram = glUseProgram; + procs->glLinkProgram = glLinkProgram; + procs->glGetProgramiv = glGetProgramiv; + procs->glGetProgramInfoLog = glGetProgramInfoLog; + procs->glAttachShader = glAttachShader; + procs->glCreateShader = glCreateShader; + procs->glDeleteShader = glDeleteShader; + procs->glCompileShader = glCompileShader; + procs->glShaderSource = glShaderSource; + procs->glGetShaderiv = glGetShaderiv; + procs->glGetShaderInfoLog = glGetShaderInfoLog; + procs->glEnableVertexAttribArray = glEnableVertexAttribArray; + procs->glVertexAttribPointer = (FF_PFNGLVERTEXATTRIBPOINTERPROC) glVertexAttribPointer; + return 0; +} +#else +static int av_cold opengl_load_procedures(OpenGLContext *opengl) +{ + FFOpenGLFunctions *procs = &opengl->glprocs; + +#if HAVE_GLXGETPROCADDRESS +#define SelectedGetProcAddress glXGetProcAddress +#elif HAVE_WGLGETPROCADDRESS +#define SelectedGetProcAddress wglGetProcAddress +#endif + +#define LOAD_OPENGL_FUN(name, type) \ + procs->name = (type)SelectedGetProcAddress(#name); \ + if (!procs->name) { \ + av_log(opengl, AV_LOG_ERROR, "Cannot load OpenGL function: '%s'\n", #name); \ + return AVERROR(ENOSYS); \ + } + +#if HAVE_SDL + if (!opengl->no_window) + return opengl_sdl_load_procedures(opengl); +#endif + + LOAD_OPENGL_FUN(glActiveTexture, FF_PFNGLACTIVETEXTUREPROC) + LOAD_OPENGL_FUN(glGenBuffers, FF_PFNGLGENBUFFERSPROC) + LOAD_OPENGL_FUN(glDeleteBuffers, FF_PFNGLDELETEBUFFERSPROC) + LOAD_OPENGL_FUN(glBufferData, FF_PFNGLBUFFERDATAPROC) + LOAD_OPENGL_FUN(glBindBuffer, FF_PFNGLBINDBUFFERPROC) + LOAD_OPENGL_FUN(glGetAttribLocation, FF_PFNGLGETATTRIBLOCATIONPROC) + LOAD_OPENGL_FUN(glGetUniformLocation, FF_PFNGLGETUNIFORMLOCATIONPROC) + LOAD_OPENGL_FUN(glUniform1f, FF_PFNGLUNIFORM1FPROC) + LOAD_OPENGL_FUN(glUniform1i, FF_PFNGLUNIFORM1IPROC) + LOAD_OPENGL_FUN(glUniformMatrix4fv, FF_PFNGLUNIFORMMATRIX4FVPROC) + LOAD_OPENGL_FUN(glCreateProgram, FF_PFNGLCREATEPROGRAMPROC) + 
LOAD_OPENGL_FUN(glDeleteProgram, FF_PFNGLDELETEPROGRAMPROC) + LOAD_OPENGL_FUN(glUseProgram, FF_PFNGLUSEPROGRAMPROC) + LOAD_OPENGL_FUN(glLinkProgram, FF_PFNGLLINKPROGRAMPROC) + LOAD_OPENGL_FUN(glGetProgramiv, FF_PFNGLGETPROGRAMIVPROC) + LOAD_OPENGL_FUN(glGetProgramInfoLog, FF_PFNGLGETPROGRAMINFOLOGPROC) + LOAD_OPENGL_FUN(glAttachShader, FF_PFNGLATTACHSHADERPROC) + LOAD_OPENGL_FUN(glCreateShader, FF_PFNGLCREATESHADERPROC) + LOAD_OPENGL_FUN(glDeleteShader, FF_PFNGLDELETESHADERPROC) + LOAD_OPENGL_FUN(glCompileShader, FF_PFNGLCOMPILESHADERPROC) + LOAD_OPENGL_FUN(glShaderSource, FF_PFNGLSHADERSOURCEPROC) + LOAD_OPENGL_FUN(glGetShaderiv, FF_PFNGLGETSHADERIVPROC) + LOAD_OPENGL_FUN(glGetShaderInfoLog, FF_PFNGLGETSHADERINFOLOGPROC) + LOAD_OPENGL_FUN(glEnableVertexAttribArray, FF_PFNGLENABLEVERTEXATTRIBARRAYPROC) + LOAD_OPENGL_FUN(glVertexAttribPointer, FF_PFNGLVERTEXATTRIBPOINTERPROC) + + return 0; + +#undef SelectedGetProcAddress +#undef LOAD_OPENGL_FUN +} +#endif + +static void opengl_make_identity(float matrix[16]) +{ + memset(matrix, 0, 16 * sizeof(float)); + matrix[0] = matrix[5] = matrix[10] = matrix[15] = 1.0f; +} + +static void opengl_make_ortho(float matrix[16], float left, float right, + float bottom, float top, float nearZ, float farZ) +{ + float ral = right + left; + float rsl = right - left; + float tab = top + bottom; + float tsb = top - bottom; + float fan = farZ + nearZ; + float fsn = farZ - nearZ; + + memset(matrix, 0, 16 * sizeof(float)); + matrix[0] = 2.0f / rsl; + matrix[5] = 2.0f / tsb; + matrix[10] = -2.0f / fsn; + matrix[12] = -ral / rsl; + matrix[13] = -tab / tsb; + matrix[14] = -fan / fsn; + matrix[15] = 1.0f; +} + +static av_cold int opengl_read_limits(OpenGLContext *opengl) +{ + static const struct{ + const char *extension; + int major; + int minor; + } required_extensions[] = { + { "GL_ARB_multitexture", 1, 3 }, + { "GL_ARB_vertex_buffer_object", 1, 5 }, //GLX_ARB_vertex_buffer_object + { "GL_ARB_vertex_shader", 2, 0 }, + { 
"GL_ARB_fragment_shader", 2, 0 }, + { "GL_ARB_shader_objects", 2, 0 }, + { NULL, 0, 0 } + }; + int i, major, minor; + const char *extensions, *version; + + version = glGetString(GL_VERSION); + extensions = glGetString(GL_EXTENSIONS); + + av_log(opengl, AV_LOG_DEBUG, "OpenGL version: %s\n", version); + sscanf(version, "%d.%d", &major, &minor); + + for (i = 0; required_extensions[i].extension; i++) { + if (major < required_extensions[i].major && + (major == required_extensions[i].major && minor < required_extensions[i].minor) && + !strstr(extensions, required_extensions[i].extension)) { + av_log(opengl, AV_LOG_ERROR, "Required extension %s is not supported.\n", + required_extensions[i].extension); + av_log(opengl, AV_LOG_DEBUG, "Supported extensions are: %s\n", extensions); + return AVERROR(ENOSYS); + } + } + glGetIntegerv(GL_MAX_TEXTURE_SIZE, &opengl->max_texture_size); + glGetIntegerv(GL_MAX_VIEWPORT_DIMS, &opengl->max_viewport_width); + opengl->non_pow_2_textures = major >= 2 || strstr(extensions, "GL_ARB_texture_non_power_of_two"); +#if defined(GL_ES_VERSION_2_0) + opengl->unpack_subimage = !!strstr(extensions, "GL_EXT_unpack_subimage"); +#else + opengl->unpack_subimage = 1; +#endif + + av_log(opengl, AV_LOG_DEBUG, "Non Power of 2 textures support: %s\n", opengl->non_pow_2_textures ? "Yes" : "No"); + av_log(opengl, AV_LOG_DEBUG, "Unpack Subimage extension support: %s\n", opengl->unpack_subimage ? 
"Yes" : "No"); + av_log(opengl, AV_LOG_DEBUG, "Max texture size: %dx%d\n", opengl->max_texture_size, opengl->max_texture_size); + av_log(opengl, AV_LOG_DEBUG, "Max viewport size: %dx%d\n", + opengl->max_viewport_width, opengl->max_viewport_height); + + OPENGL_ERROR_CHECK(opengl); + return 0; + fail: + return AVERROR_EXTERNAL; +} + +static const char* opengl_get_fragment_shader_code(enum AVPixelFormat format) +{ + int i; + for (i = 0; i < FF_ARRAY_ELEMS(opengl_format_desc); i++) { + if (opengl_format_desc[i].fixel_format == format) + return *opengl_format_desc[i].fragment_shader; + } + return NULL; +} + +static int opengl_type_size(GLenum type) +{ + switch(type) { + case GL_UNSIGNED_SHORT: + case FF_GL_UNSIGNED_SHORT_1_5_5_5_REV: + case GL_UNSIGNED_SHORT_5_6_5: + return 2; + case GL_UNSIGNED_BYTE: + case FF_GL_UNSIGNED_BYTE_3_3_2: + case FF_GL_UNSIGNED_BYTE_2_3_3_REV: + default: + break; + } + return 1; +} + +static av_cold void opengl_get_texture_params(OpenGLContext *opengl) +{ + int i; + for (i = 0; i < FF_ARRAY_ELEMS(opengl_format_desc); i++) { + if (opengl_format_desc[i].fixel_format == opengl->pix_fmt) { + opengl->format = opengl_format_desc[i].format; + opengl->type = opengl_format_desc[i].type; + break; + } + } +} + +static void opengl_compute_display_area(AVFormatContext *s) +{ + AVRational sar, dar; /* sample and display aspect ratios */ + OpenGLContext *opengl = s->priv_data; + AVStream *st = s->streams[0]; + AVCodecContext *encctx = st->codec; + + /* compute overlay width and height from the codec context information */ + sar = st->sample_aspect_ratio.num ? 
          st->sample_aspect_ratio : (AVRational){ 1, 1 };
    dar = av_mul_q(sar, (AVRational){ encctx->width, encctx->height });

    /* we suppose the screen has a 1/1 sample aspect ratio */
    /* fit in the window */
    if (av_cmp_q(dar, (AVRational){ opengl->window_width, opengl->window_height }) > 0) {
        /* fit in width */
        opengl->picture_width = opengl->window_width;
        opengl->picture_height = av_rescale(opengl->picture_width, dar.den, dar.num);
    } else {
        /* fit in height */
        opengl->picture_height = opengl->window_height;
        opengl->picture_width = av_rescale(opengl->picture_height, dar.num, dar.den);
    }
}

/* Texture dimensions for a frame of in_width x in_height: identity when
 * NPOT textures are supported, otherwise the smallest square power of two
 * (clamped to max_texture_size) that holds the larger frame dimension. */
static av_cold void opengl_get_texture_size(OpenGLContext *opengl, int in_width, int in_height,
                                            int *out_width, int *out_height)
{
    if (opengl->non_pow_2_textures) {
        *out_width = in_width;
        *out_height = in_height;
    } else {
        int max = FFMIN(FFMAX(in_width, in_height), opengl->max_texture_size);
        unsigned power_of_2 = 1;
        while (power_of_2 < max)
            power_of_2 *= 2;
        *out_height = power_of_2;
        *out_width = power_of_2;
        av_log(opengl, AV_LOG_DEBUG, "Texture size calculated from %dx%d into %dx%d\n",
               in_width, in_height, *out_width, *out_height);
    }
}

/* Build the 4x4 color_map matrix that reorders sampled texture components
 * into RGBA order for packed RGB formats; a no-op for non-RGB formats. */
static av_cold void opengl_fill_color_map(OpenGLContext *opengl)
{
    const AVPixFmtDescriptor *desc;
    int shift;
    enum AVPixelFormat pix_fmt = opengl->pix_fmt;

    /* We need order of components, not exact position, some minor HACKs here */
    if (pix_fmt == AV_PIX_FMT_RGB565 || pix_fmt == AV_PIX_FMT_BGR555 ||
        pix_fmt == AV_PIX_FMT_BGR8   || pix_fmt == AV_PIX_FMT_RGB8)
        pix_fmt = AV_PIX_FMT_RGB24;
    else if (pix_fmt == AV_PIX_FMT_BGR565 || pix_fmt == AV_PIX_FMT_RGB555)
        pix_fmt = AV_PIX_FMT_BGR24;

    desc = av_pix_fmt_desc_get(pix_fmt);
    if (!(desc->flags & AV_PIX_FMT_FLAG_RGB))
        return;

    /* Sets a 1.0 in row i at the column given by component i's byte offset
     * scaled to component index. NOTE(review): assumes offset_plus1/depth
     * describe byte-aligned components for every format reaching this
     * point — confirm against the pixdesc tables. */
#define FILL_COMPONENT(i) { \
        shift = desc->comp[i].depth_minus1 >> 3; \
        opengl->color_map[(i << 2) + ((desc->comp[i].offset_plus1 - 1) >> shift)] = 1.0; \
    }

    memset(opengl->color_map, 0, sizeof(opengl->color_map));
    FILL_COMPONENT(0);
    FILL_COMPONENT(1);
    FILL_COMPONENT(2);
    if (desc->flags & AV_PIX_FMT_FLAG_ALPHA)
        FILL_COMPONENT(3);

#undef FILL_COMPONENT
}

/* Compile a single shader of the given type from source.
 * Returns the shader name, or 0 on failure (compile log is reported). */
static av_cold GLuint opengl_load_shader(OpenGLContext *opengl, GLenum type, const char *source)
{
    GLuint shader = opengl->glprocs.glCreateShader(type);
    GLint result;
    if (!shader) {
        av_log(opengl, AV_LOG_ERROR, "glCreateShader() failed\n");
        return 0;
    }
    opengl->glprocs.glShaderSource(shader, 1, &source, NULL);
    opengl->glprocs.glCompileShader(shader);

    opengl->glprocs.glGetShaderiv(shader, FF_GL_COMPILE_STATUS, &result);
    if (!result) {
        char *log;
        /* result is reused: now holds the info-log length. */
        opengl->glprocs.glGetShaderiv(shader, FF_GL_INFO_LOG_LENGTH, &result);
        if (result) {
            if ((log = av_malloc(result))) {
                opengl->glprocs.glGetShaderInfoLog(shader, result, NULL, log);
                av_log(opengl, AV_LOG_ERROR, "Compile error: %s\n", log);
                av_free(log);
            }
        }
        goto fail;
    }
    OPENGL_ERROR_CHECK(opengl);
    return shader;
  fail:
    opengl->glprocs.glDeleteShader(shader);
    return 0;
}

/* Compile + link the vertex shader and the fragment shader matching
 * pix_fmt, then resolve all attribute/uniform locations. On failure all
 * partially created objects are destroyed. */
static av_cold int opengl_compile_shaders(OpenGLContext *opengl, enum AVPixelFormat pix_fmt)
{
    GLint result;
    const char *fragment_shader_code = opengl_get_fragment_shader_code(pix_fmt);

    if (!fragment_shader_code) {
        av_log(opengl, AV_LOG_ERROR, "Provided pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(pix_fmt));
        return AVERROR(EINVAL);
    }

    opengl->vertex_shader = opengl_load_shader(opengl, FF_GL_VERTEX_SHADER,
                                               FF_OPENGL_VERTEX_SHADER);
    if (!opengl->vertex_shader) {
        av_log(opengl, AV_LOG_ERROR, "Vertex shader loading failed.\n");
        goto fail;
    }
    opengl->fragment_shader = opengl_load_shader(opengl, FF_GL_FRAGMENT_SHADER,
                                                 fragment_shader_code);
    if (!opengl->fragment_shader) {
        av_log(opengl, AV_LOG_ERROR, "Fragment shader loading failed.\n");
        goto fail;
    }

    opengl->program = opengl->glprocs.glCreateProgram();
    if (!opengl->program)
        goto fail;

    opengl->glprocs.glAttachShader(opengl->program, opengl->vertex_shader);
    opengl->glprocs.glAttachShader(opengl->program, opengl->fragment_shader);
    opengl->glprocs.glLinkProgram(opengl->program);

    opengl->glprocs.glGetProgramiv(opengl->program, FF_GL_LINK_STATUS, &result);
    if (!result) {
        char *log;
        opengl->glprocs.glGetProgramiv(opengl->program, FF_GL_INFO_LOG_LENGTH, &result);
        if (result) {
            log = av_malloc(result);
            if (!log)
                goto fail;
            opengl->glprocs.glGetProgramInfoLog(opengl->program, result, NULL, log);
            av_log(opengl, AV_LOG_ERROR, "Link error: %s\n", log);
            av_free(log);
        }
        goto fail;
    }

    /* Any location may legitimately come back -1 (e.g. uniforms the
     * current fragment shader does not use); users test for that. */
    opengl->position_attrib = opengl->glprocs.glGetAttribLocation(opengl->program, "a_position");
    opengl->texture_coords_attrib = opengl->glprocs.glGetAttribLocation(opengl->program, "a_textureCoords");
    opengl->projection_matrix_location = opengl->glprocs.glGetUniformLocation(opengl->program, "u_projectionMatrix");
    opengl->model_view_matrix_location = opengl->glprocs.glGetUniformLocation(opengl->program, "u_modelViewMatrix");
    opengl->color_map_location = opengl->glprocs.glGetUniformLocation(opengl->program, "u_colorMap");
    opengl->texture_location[0] = opengl->glprocs.glGetUniformLocation(opengl->program, "u_texture0");
    opengl->texture_location[1] = opengl->glprocs.glGetUniformLocation(opengl->program, "u_texture1");
    opengl->texture_location[2] = opengl->glprocs.glGetUniformLocation(opengl->program, "u_texture2");
    opengl->texture_location[3] = opengl->glprocs.glGetUniformLocation(opengl->program, "u_texture3");
    opengl->chroma_div_w_location = opengl->glprocs.glGetUniformLocation(opengl->program, "u_chroma_div_w");
    opengl->chroma_div_h_location = opengl->glprocs.glGetUniformLocation(opengl->program, "u_chroma_div_h");

    OPENGL_ERROR_CHECK(opengl);
    return 0;
  fail:
    opengl->glprocs.glDeleteShader(opengl->vertex_shader);
    opengl->glprocs.glDeleteShader(opengl->fragment_shader);
opengl->glprocs.glDeleteProgram(opengl->program); + opengl->fragment_shader = opengl->vertex_shader = opengl->program = 0; + return AVERROR_EXTERNAL; +} + +static av_cold int opengl_configure_texture(OpenGLContext *opengl, GLuint texture, + GLsizei width, GLsizei height) +{ + if (texture) { + int new_width, new_height; + opengl_get_texture_size(opengl, width, height, &new_width, &new_height); + glBindTexture(GL_TEXTURE_2D, texture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexImage2D(GL_TEXTURE_2D, 0, opengl->format, new_width, new_height, 0, + opengl->format, opengl->type, NULL); + OPENGL_ERROR_CHECK(NULL); + } + return 0; + fail: + return AVERROR_EXTERNAL; +} + +static av_cold int opengl_prepare_vertex(AVFormatContext *s) +{ + OpenGLContext *opengl = s->priv_data; + int tex_w, tex_h; + + if (opengl->window_width > opengl->max_viewport_width || opengl->window_height > opengl->max_viewport_height) { + opengl->window_width = FFMIN(opengl->window_width, opengl->max_viewport_width); + opengl->window_height = FFMIN(opengl->window_height, opengl->max_viewport_height); + av_log(opengl, AV_LOG_WARNING, "Too big viewport requested, limited to %dx%d", opengl->window_width, opengl->window_height); + } + glViewport(0, 0, opengl->window_width, opengl->window_height); + opengl_make_ortho(opengl->projection_matrix, + - (float)opengl->window_width / 2.0f, (float)opengl->window_width / 2.0f, + - (float)opengl->window_height / 2.0f, (float)opengl->window_height / 2.0f, + 1.0f, -1.0f); + opengl_make_identity(opengl->model_view_matrix); + + opengl_compute_display_area(s); + + opengl->vertex[0].z = opengl->vertex[1].z = opengl->vertex[2].z = opengl->vertex[3].z = 0.0f; + opengl->vertex[0].x = opengl->vertex[1].x = - (float)opengl->picture_width / 
2.0f; + opengl->vertex[2].x = opengl->vertex[3].x = (float)opengl->picture_width / 2.0f; + opengl->vertex[1].y = opengl->vertex[2].y = - (float)opengl->picture_height / 2.0f; + opengl->vertex[0].y = opengl->vertex[3].y = (float)opengl->picture_height / 2.0f; + + opengl_get_texture_size(opengl, opengl->width, opengl->height, &tex_w, &tex_h); + + opengl->vertex[0].s0 = 0.0f; + opengl->vertex[0].t0 = 0.0f; + opengl->vertex[1].s0 = 0.0f; + opengl->vertex[1].t0 = (float)opengl->height / (float)tex_h; + opengl->vertex[2].s0 = (float)opengl->width / (float)tex_w; + opengl->vertex[2].t0 = (float)opengl->height / (float)tex_h; + opengl->vertex[3].s0 = (float)opengl->width / (float)tex_w; + opengl->vertex[3].t0 = 0.0f; + + opengl->glprocs.glBindBuffer(FF_GL_ARRAY_BUFFER, opengl->vertex_buffer); + opengl->glprocs.glBufferData(FF_GL_ARRAY_BUFFER, sizeof(opengl->vertex), opengl->vertex, FF_GL_STATIC_DRAW); + opengl->glprocs.glBindBuffer(FF_GL_ARRAY_BUFFER, 0); + OPENGL_ERROR_CHECK(opengl); + return 0; + fail: + return AVERROR_EXTERNAL; +} + +static int opengl_prepare(OpenGLContext *opengl) +{ + int i; + opengl->glprocs.glUseProgram(opengl->program); + opengl->glprocs.glUniformMatrix4fv(opengl->projection_matrix_location, 1, GL_FALSE, opengl->projection_matrix); + opengl->glprocs.glUniformMatrix4fv(opengl->model_view_matrix_location, 1, GL_FALSE, opengl->model_view_matrix); + for (i = 0; i < 4; i++) + if (opengl->texture_location[i] != -1) { + opengl->glprocs.glActiveTexture(GL_TEXTURE0 + i); + glBindTexture(GL_TEXTURE_2D, opengl->texture_name[i]); + opengl->glprocs.glUniform1i(opengl->texture_location[i], i); + } + if (opengl->color_map_location != -1) + opengl->glprocs.glUniformMatrix4fv(opengl->color_map_location, 1, GL_FALSE, opengl->color_map); + if (opengl->chroma_div_h_location != -1) + opengl->glprocs.glUniform1f(opengl->chroma_div_h_location, opengl->chroma_div_h); + if (opengl->chroma_div_w_location != -1) + opengl->glprocs.glUniform1f(opengl->chroma_div_w_location, 
opengl->chroma_div_w); + + OPENGL_ERROR_CHECK(opengl); + return 0; + fail: + return AVERROR_EXTERNAL; +} + +static int opengl_create_window(AVFormatContext *h) +{ + OpenGLContext *opengl = h->priv_data; + int ret; + + if (!opengl->no_window) { +#if HAVE_SDL + if ((ret = opengl_sdl_create_window(h)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Cannot create default SDL window.\n"); + return ret; + } +#else + av_log(opengl, AV_LOG_ERROR, "FFmpeg is compiled without SDL. Cannot create default window.\n"); + return AVERROR(ENOSYS); +#endif + } else { + AVDeviceRect message; + message.x = message.y = 0; + message.width = opengl->window_width; + message.height = opengl->window_height; + if ((ret = avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_CREATE_WINDOW_BUFFER, + &message , sizeof(message))) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to create window buffer.\n"); + return ret; + } + if ((ret = avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER, NULL , 0)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to prepare window buffer.\n"); + return ret; + } + } + return 0; +} + +static int opengl_release_window(AVFormatContext *h) +{ + int ret; + OpenGLContext *opengl = h->priv_data; + if (!opengl->no_window) { +#if HAVE_SDL + SDL_Quit(); +#endif + } else if ((ret = avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER, NULL , 0)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to release window buffer.\n"); + return ret; + } + return 0; +} + +static av_cold int opengl_write_trailer(AVFormatContext *h) +{ + OpenGLContext *opengl = h->priv_data; + + if (opengl->no_window && + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER, NULL , 0) < 0) + av_log(opengl, AV_LOG_ERROR, "Application failed to prepare window buffer.\n"); + + opengl_deinit_context(opengl); + opengl_release_window(h); + + return 0; +} + +static av_cold int opengl_init_context(OpenGLContext *opengl) +{ + int 
i, ret; + const AVPixFmtDescriptor *desc; + + if ((ret = opengl_compile_shaders(opengl, opengl->pix_fmt)) < 0) + goto fail; + + desc = av_pix_fmt_desc_get(opengl->pix_fmt); + av_assert0(desc->nb_components > 0 && desc->nb_components <= 4); + glGenTextures(desc->nb_components, opengl->texture_name); + + opengl->glprocs.glGenBuffers(2, &opengl->index_buffer); + if (!opengl->index_buffer || !opengl->vertex_buffer) { + av_log(opengl, AV_LOG_ERROR, "Buffer generation failed.\n"); + ret = AVERROR_EXTERNAL; + goto fail; + } + + opengl_configure_texture(opengl, opengl->texture_name[0], opengl->width, opengl->height); + if (desc->nb_components > 1) { + int has_alpha = desc->flags & AV_PIX_FMT_FLAG_ALPHA; + int num_planes = desc->nb_components - (has_alpha ? 1 : 0); + if (opengl->non_pow_2_textures) { + opengl->chroma_div_w = 1.0f; + opengl->chroma_div_h = 1.0f; + } else { + opengl->chroma_div_w = 1 << desc->log2_chroma_w; + opengl->chroma_div_h = 1 << desc->log2_chroma_h; + } + for (i = 1; i < num_planes; i++) + if (opengl->non_pow_2_textures) + opengl_configure_texture(opengl, opengl->texture_name[i], + FF_CEIL_RSHIFT(opengl->width, desc->log2_chroma_w), + FF_CEIL_RSHIFT(opengl->height, desc->log2_chroma_h)); + else + opengl_configure_texture(opengl, opengl->texture_name[i], opengl->width, opengl->height); + if (has_alpha) + opengl_configure_texture(opengl, opengl->texture_name[3], opengl->width, opengl->height); + } + + opengl->glprocs.glBindBuffer(FF_GL_ELEMENT_ARRAY_BUFFER, opengl->index_buffer); + opengl->glprocs.glBufferData(FF_GL_ELEMENT_ARRAY_BUFFER, sizeof(g_index), g_index, FF_GL_STATIC_DRAW); + opengl->glprocs.glBindBuffer(FF_GL_ELEMENT_ARRAY_BUFFER, 0); + + glEnable(GL_BLEND); + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); + + glClearColor((float)opengl->background[0] / 255.0f, (float)opengl->background[1] / 255.0f, + (float)opengl->background[2] / 255.0f, 1.0f); + + ret = AVERROR_EXTERNAL; + OPENGL_ERROR_CHECK(opengl); + + return 0; + fail: + return ret; 
+} + +static av_cold int opengl_write_header(AVFormatContext *h) +{ + OpenGLContext *opengl = h->priv_data; + AVStream *st; + int ret; + + if (h->nb_streams != 1 || + h->streams[0]->codec->codec_type != AVMEDIA_TYPE_VIDEO || + h->streams[0]->codec->codec_id != AV_CODEC_ID_RAWVIDEO) { + av_log(opengl, AV_LOG_ERROR, "Only a single video stream is supported.\n"); + return AVERROR(EINVAL); + } + st = h->streams[0]; + opengl->width = st->codec->width; + opengl->height = st->codec->height; + opengl->pix_fmt = st->codec->pix_fmt; + if (!opengl->window_width) + opengl->window_width = opengl->width; + if (!opengl->window_height) + opengl->window_height = opengl->height; + + if (!opengl->window_title && !opengl->no_window) + opengl->window_title = av_strdup(h->filename); + + if ((ret = opengl_create_window(h))) + goto fail; + + if ((ret = opengl_read_limits(opengl)) < 0) + goto fail; + + if (opengl->width > opengl->max_texture_size || opengl->height > opengl->max_texture_size) { + av_log(opengl, AV_LOG_ERROR, "Too big picture %dx%d, max supported size is %dx%d\n", + opengl->width, opengl->height, opengl->max_texture_size, opengl->max_texture_size); + ret = AVERROR(EINVAL); + goto fail; + } + + if ((ret = opengl_load_procedures(opengl)) < 0) + goto fail; + + opengl_fill_color_map(opengl); + opengl_get_texture_params(opengl); + + if ((ret = opengl_init_context(opengl)) < 0) + goto fail; + + if ((ret = opengl_prepare_vertex(h)) < 0) + goto fail; + + glClear(GL_COLOR_BUFFER_BIT); + +#if HAVE_SDL + if (!opengl->no_window) + SDL_GL_SwapBuffers(); +#endif + if (opengl->no_window && + (ret = avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER, NULL , 0)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to display window buffer.\n"); + goto fail; + } + + ret = AVERROR_EXTERNAL; + OPENGL_ERROR_CHECK(opengl); + + opengl->inited = 1; + return 0; + + fail: + opengl_write_trailer(h); + return ret; +} + +static uint8_t* opengl_get_plane_pointer(OpenGLContext 
*opengl, AVPacket *pkt, int comp_index, + const AVPixFmtDescriptor *desc) +{ + uint8_t *data = pkt->data; + int wordsize = opengl_type_size(opengl->type); + int width_chroma = FF_CEIL_RSHIFT(opengl->width, desc->log2_chroma_w); + int height_chroma = FF_CEIL_RSHIFT(opengl->height, desc->log2_chroma_h); + int plane = desc->comp[comp_index].plane; + + switch(plane) { + case 0: + break; + case 1: + data += opengl->width * opengl->height * wordsize; + break; + case 2: + data += opengl->width * opengl->height * wordsize; + data += width_chroma * height_chroma * wordsize; + break; + case 3: + data += opengl->width * opengl->height * wordsize; + data += 2 * width_chroma * height_chroma * wordsize; + break; + default: + return NULL; + } + return data; +} + +#define LOAD_TEXTURE_DATA(comp_index, sub) \ +{ \ + int width = sub ? FF_CEIL_RSHIFT(opengl->width, desc->log2_chroma_w) : opengl->width; \ + int height = sub ? FF_CEIL_RSHIFT(opengl->height, desc->log2_chroma_h): opengl->height; \ + uint8_t *data; \ + int plane = desc->comp[comp_index].plane; \ + \ + glBindTexture(GL_TEXTURE_2D, opengl->texture_name[comp_index]); \ + if (!is_pkt) { \ + GLint length = ((AVFrame *)input)->linesize[plane]; \ + int bytes_per_pixel = opengl_type_size(opengl->type); \ + if (!(desc->flags & AV_PIX_FMT_FLAG_PLANAR)) \ + bytes_per_pixel *= desc->nb_components; \ + data = ((AVFrame *)input)->data[plane]; \ + if (!(length % bytes_per_pixel) && \ + (opengl->unpack_subimage || ((length / bytes_per_pixel) == width))) { \ + length /= bytes_per_pixel; \ + if (length != width) \ + glPixelStorei(FF_GL_UNPACK_ROW_LENGTH, length); \ + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, \ + opengl->format, opengl->type, data); \ + if (length != width) \ + glPixelStorei(FF_GL_UNPACK_ROW_LENGTH, 0); \ + } else { \ + int h; \ + for (h = 0; h < height; h++) { \ + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, h, width, 1, \ + opengl->format, opengl->type, data); \ + data += length; \ + } \ + } \ + } else { \ + data = 
opengl_get_plane_pointer(opengl, input, comp_index, desc); \ + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, \ + opengl->format, opengl->type, data); \ + } \ +} + +static int opengl_draw(AVFormatContext *h, void *input, int repaint, int is_pkt) +{ + OpenGLContext *opengl = h->priv_data; + enum AVPixelFormat pix_fmt = h->streams[0]->codec->pix_fmt; + const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt); + int ret; + +#if HAVE_SDL + if (!opengl->no_window && (ret = opengl_sdl_process_events(h)) < 0) + goto fail; +#endif + if (opengl->no_window && + (ret = avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER, NULL , 0)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to prepare window buffer.\n"); + goto fail; + } + + glClear(GL_COLOR_BUFFER_BIT); + + if (!repaint) { + if (is_pkt) + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + LOAD_TEXTURE_DATA(0, 0) + if (desc->flags & AV_PIX_FMT_FLAG_PLANAR) { + LOAD_TEXTURE_DATA(1, 1) + LOAD_TEXTURE_DATA(2, 1) + if (desc->flags & AV_PIX_FMT_FLAG_ALPHA) + LOAD_TEXTURE_DATA(3, 0) + } + } + ret = AVERROR_EXTERNAL; + OPENGL_ERROR_CHECK(opengl); + + if ((ret = opengl_prepare(opengl)) < 0) + goto fail; + + opengl->glprocs.glBindBuffer(FF_GL_ARRAY_BUFFER, opengl->vertex_buffer); + opengl->glprocs.glBindBuffer(FF_GL_ELEMENT_ARRAY_BUFFER, opengl->index_buffer); + opengl->glprocs.glVertexAttribPointer(opengl->position_attrib, 3, GL_FLOAT, GL_FALSE, sizeof(OpenGLVertexInfo), 0); + opengl->glprocs.glEnableVertexAttribArray(opengl->position_attrib); + opengl->glprocs.glVertexAttribPointer(opengl->texture_coords_attrib, 2, GL_FLOAT, GL_FALSE, sizeof(OpenGLVertexInfo), 12); + opengl->glprocs.glEnableVertexAttribArray(opengl->texture_coords_attrib); + + glDrawElements(GL_TRIANGLES, FF_ARRAY_ELEMS(g_index), GL_UNSIGNED_SHORT, 0); + + ret = AVERROR_EXTERNAL; + OPENGL_ERROR_CHECK(opengl); + +#if HAVE_SDL + if (!opengl->no_window) + SDL_GL_SwapBuffers(); +#endif + if (opengl->no_window && + (ret = 
avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER, NULL , 0)) < 0) { + av_log(opengl, AV_LOG_ERROR, "Application failed to display window buffer.\n"); + goto fail; + } + + return 0; + fail: + return ret; +} + +static int opengl_write_packet(AVFormatContext *h, AVPacket *pkt) +{ + return opengl_draw(h, pkt, 0, 1); +} + +static int opengl_write_frame(AVFormatContext *h, int stream_index, + AVFrame **frame, unsigned flags) +{ + if ((flags & AV_WRITE_UNCODED_FRAME_QUERY)) + return 0; + return opengl_draw(h, *frame, 0, 0); +} + +#define OFFSET(x) offsetof(OpenGLContext, x) +#define ENC AV_OPT_FLAG_ENCODING_PARAM +static const AVOption options[] = { + { "background", "set background color", OFFSET(background), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, ENC }, + { "no_window", "disable default window", OFFSET(no_window), AV_OPT_TYPE_INT, {.i64 = 0}, INT_MIN, INT_MAX, ENC }, + { "window_title", "set window title", OFFSET(window_title), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, ENC }, + { "window_size", "set window size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, ENC }, + { NULL } +}; + +static const AVClass opengl_class = { + .class_name = "opengl outdev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_opengl_muxer = { + .name = "opengl", + .long_name = NULL_IF_CONFIG_SMALL("OpenGL output"), + .priv_data_size = sizeof(OpenGLContext), + .audio_codec = AV_CODEC_ID_NONE, + .video_codec = AV_CODEC_ID_RAWVIDEO, + .write_header = opengl_write_header, + .write_packet = opengl_write_packet, + .write_uncoded_frame = opengl_write_frame, + .write_trailer = opengl_write_trailer, + .control_message = opengl_control_message, + .flags = AVFMT_NOFILE | AVFMT_VARIABLE_FPS | AVFMT_NOTIMESTAMPS, + .priv_class = &opengl_class, +}; diff --git a/libavdevice/opengl_enc_shaders.h 
b/libavdevice/opengl_enc_shaders.h new file mode 100644 index 0000000..ed8b3d3 --- /dev/null +++ b/libavdevice/opengl_enc_shaders.h @@ -0,0 +1,188 @@ +/* + * Copyright (c) 2014 Lukasz Marek + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVDEVICE_OPENGL_SHADERS_H +#define AVDEVICE_OPENGL_SHADERS_H + +#include "libavutil/pixfmt.h" + +static const char * const FF_OPENGL_VERTEX_SHADER = + "uniform mat4 u_projectionMatrix;" + "uniform mat4 u_modelViewMatrix;" + + "attribute vec4 a_position;" + "attribute vec2 a_textureCoords;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "gl_Position = u_projectionMatrix * (a_position * u_modelViewMatrix);" + "texture_coordinate = a_textureCoords;" + "}"; + +/** + * Fragment shader for packet RGBA formats. + */ +static const char * const FF_OPENGL_FRAGMENT_SHADER_RGBA_PACKET = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D u_texture0;" + "uniform mat4 u_colorMap;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "gl_FragColor = texture2D(u_texture0, texture_coordinate) * u_colorMap;" + "}"; + +/** + * Fragment shader for packet RGB formats. 
+ */ +static const char * const FF_OPENGL_FRAGMENT_SHADER_RGB_PACKET = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D u_texture0;" + "uniform mat4 u_colorMap;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "gl_FragColor = vec4((texture2D(u_texture0, texture_coordinate) * u_colorMap).rgb, 1.0);" + "}"; + +/** + * Fragment shader for planar RGBA formats. + */ +static const char * const FF_OPENGL_FRAGMENT_SHADER_RGBA_PLANAR = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D u_texture0;" + "uniform sampler2D u_texture1;" + "uniform sampler2D u_texture2;" + "uniform sampler2D u_texture3;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "gl_FragColor = vec4(texture2D(u_texture0, texture_coordinate).r," + "texture2D(u_texture1, texture_coordinate).r," + "texture2D(u_texture2, texture_coordinate).r," + "texture2D(u_texture3, texture_coordinate).r);" + "}"; + +/** + * Fragment shader for planar RGB formats. + */ +static const char * const FF_OPENGL_FRAGMENT_SHADER_RGB_PLANAR = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D u_texture0;" + "uniform sampler2D u_texture1;" + "uniform sampler2D u_texture2;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "gl_FragColor = vec4(texture2D(u_texture0, texture_coordinate).r," + "texture2D(u_texture1, texture_coordinate).r," + "texture2D(u_texture2, texture_coordinate).r," + "1.0);" + "}"; + +/** + * Fragment shader for planar YUV formats. 
+ */ +static const char * const FF_OPENGL_FRAGMENT_SHADER_YUV_PLANAR = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D u_texture0;" + "uniform sampler2D u_texture1;" + "uniform sampler2D u_texture2;" + "uniform float u_chroma_div_w;" + "uniform float u_chroma_div_h;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "vec3 yuv;" + + "yuv.r = texture2D(u_texture0, texture_coordinate).r - 0.0625;" + "yuv.g = texture2D(u_texture1, vec2(texture_coordinate.x / u_chroma_div_w, texture_coordinate.y / u_chroma_div_h)).r - 0.5;" + "yuv.b = texture2D(u_texture2, vec2(texture_coordinate.x / u_chroma_div_w, texture_coordinate.y / u_chroma_div_h)).r - 0.5;" + + "gl_FragColor = clamp(vec4(mat3(1.1643, 1.16430, 1.1643," + "0.0, -0.39173, 2.0170," + "1.5958, -0.81290, 0.0) * yuv, 1.0), 0.0, 1.0);" + + "}"; + +/** + * Fragment shader for planar YUVA formats. + */ +static const char * const FF_OPENGL_FRAGMENT_SHADER_YUVA_PLANAR = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D u_texture0;" + "uniform sampler2D u_texture1;" + "uniform sampler2D u_texture2;" + "uniform sampler2D u_texture3;" + "uniform float u_chroma_div_w;" + "uniform float u_chroma_div_h;" + + "varying vec2 texture_coordinate;" + + "void main()" + "{" + "vec3 yuv;" + + "yuv.r = texture2D(u_texture0, texture_coordinate).r - 0.0625;" + "yuv.g = texture2D(u_texture1, vec2(texture_coordinate.x / u_chroma_div_w, texture_coordinate.y / u_chroma_div_h)).r - 0.5;" + "yuv.b = texture2D(u_texture2, vec2(texture_coordinate.x / u_chroma_div_w, texture_coordinate.y / u_chroma_div_h)).r - 0.5;" + + "gl_FragColor = clamp(vec4(mat3(1.1643, 1.16430, 1.1643," + "0.0, -0.39173, 2.0170," + "1.5958, -0.81290, 0.0) * yuv, texture2D(u_texture3, texture_coordinate).r), 0.0, 1.0);" + "}"; + +static const char * const FF_OPENGL_FRAGMENT_SHADER_GRAY = +#if defined(GL_ES_VERSION_2_0) + "precision mediump float;" +#endif + "uniform sampler2D 
u_texture0;" + "varying vec2 texture_coordinate;" + "void main()" + "{" + "float c = texture2D(u_texture0, texture_coordinate).r;" + "gl_FragColor = vec4(c, c, c, 1.0);" + "}"; + +#endif /* AVDEVICE_OPENGL_SHADERS_H */ diff --git a/libavdevice/oss_audio.c b/libavdevice/oss_audio.c index ad52d78..951acbc 100644 --- a/libavdevice/oss_audio.c +++ b/libavdevice/oss_audio.c @@ -2,20 +2,20 @@ * Linux audio play and grab interface * Copyright (c) 2000, 2001 Fabrice Bellard * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -29,15 +29,16 @@ #include <sys/soundcard.h> #endif +#if HAVE_UNISTD_H #include <unistd.h> +#endif #include <fcntl.h> #include <sys/ioctl.h> #include "libavutil/log.h" #include "libavcodec/avcodec.h" - -#include "libavformat/avformat.h" +#include "avdevice.h" #include "oss_audio.h" @@ -63,13 +64,28 @@ int ff_oss_audio_open(AVFormatContext *s1, int is_output, } /* non blocking mode */ - if (!is_output) - fcntl(audio_fd, F_SETFL, O_NONBLOCK); + if (!is_output) { + if (fcntl(audio_fd, F_SETFL, O_NONBLOCK) < 0) { + av_log(s1, AV_LOG_WARNING, "%s: Could not enable non block mode (%s)\n", audio_device, strerror(errno)); + } + } s->frame_size = OSS_AUDIO_BLOCK_SIZE; - /* select format : favour native format */ +#define CHECK_IOCTL_ERROR(event) \ + if (err < 0) { \ + av_log(s1, AV_LOG_ERROR, #event ": %s\n", strerror(errno)); \ + goto fail; \ + } + + /* select format : favour native format + * We don't CHECK_IOCTL_ERROR here because even if failed OSS still may be + * usable. If OSS is not usable the SNDCTL_DSP_SETFMTS later is going to + * fail anyway. `err =` kept to eliminate compiler warning. 
*/ err = ioctl(audio_fd, SNDCTL_DSP_GETFMTS, &tmp); + if (err < 0) { + av_log(s1, AV_LOG_WARNING, "SNDCTL_DSP_GETFMTS: %s\n", strerror(errno)); + } #if HAVE_BIGENDIAN if (tmp & AFMT_S16_BE) { @@ -102,24 +118,15 @@ int ff_oss_audio_open(AVFormatContext *s1, int is_output, return AVERROR(EIO); } err=ioctl(audio_fd, SNDCTL_DSP_SETFMT, &tmp); - if (err < 0) { - av_log(s1, AV_LOG_ERROR, "SNDCTL_DSP_SETFMT: %s\n", strerror(errno)); - goto fail; - } + CHECK_IOCTL_ERROR(SNDCTL_DSP_SETFMTS) tmp = (s->channels == 2); err = ioctl(audio_fd, SNDCTL_DSP_STEREO, &tmp); - if (err < 0) { - av_log(s1, AV_LOG_ERROR, "SNDCTL_DSP_STEREO: %s\n", strerror(errno)); - goto fail; - } + CHECK_IOCTL_ERROR(SNDCTL_DSP_STEREO) tmp = s->sample_rate; err = ioctl(audio_fd, SNDCTL_DSP_SPEED, &tmp); - if (err < 0) { - av_log(s1, AV_LOG_ERROR, "SNDCTL_DSP_SPEED: %s\n", strerror(errno)); - goto fail; - } + CHECK_IOCTL_ERROR(SNDCTL_DSP_SPEED) s->sample_rate = tmp; /* store real sample rate */ s->fd = audio_fd; @@ -127,6 +134,7 @@ int ff_oss_audio_open(AVFormatContext *s1, int is_output, fail: close(audio_fd); return AVERROR(EIO); +#undef CHECK_IOCTL_ERROR } int ff_oss_audio_close(OSSAudioData *s) diff --git a/libavdevice/oss_audio.h b/libavdevice/oss_audio.h index 87ac4ad..8258028 100644 --- a/libavdevice/oss_audio.h +++ b/libavdevice/oss_audio.h @@ -1,18 +1,18 @@ /* - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ diff --git a/libavdevice/oss_audio_dec.c b/libavdevice/oss_audio_dec.c index 601d91c..1f86d06 100644 --- a/libavdevice/oss_audio_dec.c +++ b/libavdevice/oss_audio_dec.c @@ -2,20 +2,20 @@ * Linux audio play interface * Copyright (c) 2000, 2001 Fabrice Bellard * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -29,7 +29,9 @@ #include <sys/soundcard.h> #endif +#if HAVE_UNISTD_H #include <unistd.h> +#endif #include <fcntl.h> #include <sys/ioctl.h> @@ -39,7 +41,7 @@ #include "libavcodec/avcodec.h" -#include "libavformat/avformat.h" +#include "avdevice.h" #include "libavformat/internal.h" #include "oss_audio.h" @@ -132,6 +134,7 @@ static const AVClass oss_demuxer_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, }; AVInputFormat ff_oss_demuxer = { diff --git a/libavdevice/oss_audio_enc.c b/libavdevice/oss_audio_enc.c index 688982a..5d3bded 100644 --- a/libavdevice/oss_audio_enc.c +++ b/libavdevice/oss_audio_enc.c @@ -2,20 +2,20 @@ * Linux audio grab interface * Copyright (c) 2000, 2001 Fabrice Bellard * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -27,7 +27,9 @@ #include <sys/soundcard.h> #endif +#if HAVE_UNISTD_H #include <unistd.h> +#endif #include <fcntl.h> #include <sys/ioctl.h> @@ -35,7 +37,7 @@ #include "libavcodec/avcodec.h" -#include "libavformat/avformat.h" +#include "avdevice.h" #include "libavformat/internal.h" #include "oss_audio.h" @@ -92,6 +94,13 @@ static int audio_write_trailer(AVFormatContext *s1) return 0; } +static const AVClass oss_muxer_class = { + .class_name = "OSS muxer", + .item_name = av_default_item_name, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT, +}; + AVOutputFormat ff_oss_muxer = { .name = "oss", .long_name = NULL_IF_CONFIG_SMALL("OSS (Open Sound System) playback"), @@ -105,4 +114,5 @@ AVOutputFormat ff_oss_muxer = { .write_packet = audio_write_packet, .write_trailer = audio_write_trailer, .flags = AVFMT_NOFILE, + .priv_class = &oss_muxer_class, }; diff --git a/libavdevice/pulse.c b/libavdevice/pulse.c deleted file mode 100644 index 2136ee3..0000000 --- a/libavdevice/pulse.c +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Pulseaudio input - * Copyright (c) 2011 Luca Barbato <lu_zero@gentoo.org> - * - * This file is part of Libav. - * - * Libav is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * Libav is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * PulseAudio input using the simple API. - * @author Luca Barbato <lu_zero@gentoo.org> - */ - -#include <pulse/simple.h> -#include <pulse/rtclock.h> -#include <pulse/error.h> - -#include "libavformat/avformat.h" -#include "libavformat/internal.h" -#include "libavutil/time.h" -#include "libavutil/opt.h" - -#define DEFAULT_CODEC_ID AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE) - -typedef struct PulseData { - AVClass *class; - char *server; - char *name; - char *stream_name; - int sample_rate; - int channels; - int frame_size; - int fragment_size; - pa_simple *s; - int64_t pts; - int64_t frame_duration; - int wallclock; -} PulseData; - -static pa_sample_format_t codec_id_to_pulse_format(int codec_id) { - switch (codec_id) { - case AV_CODEC_ID_PCM_U8: return PA_SAMPLE_U8; - case AV_CODEC_ID_PCM_ALAW: return PA_SAMPLE_ALAW; - case AV_CODEC_ID_PCM_MULAW: return PA_SAMPLE_ULAW; - case AV_CODEC_ID_PCM_S16LE: return PA_SAMPLE_S16LE; - case AV_CODEC_ID_PCM_S16BE: return PA_SAMPLE_S16BE; - case AV_CODEC_ID_PCM_F32LE: return PA_SAMPLE_FLOAT32LE; - case AV_CODEC_ID_PCM_F32BE: return PA_SAMPLE_FLOAT32BE; - case AV_CODEC_ID_PCM_S32LE: return PA_SAMPLE_S32LE; - case AV_CODEC_ID_PCM_S32BE: return PA_SAMPLE_S32BE; - case AV_CODEC_ID_PCM_S24LE: return PA_SAMPLE_S24LE; - case AV_CODEC_ID_PCM_S24BE: return PA_SAMPLE_S24BE; - default: return PA_SAMPLE_INVALID; - } -} - -static av_cold int pulse_read_header(AVFormatContext *s) -{ - PulseData *pd = s->priv_data; - AVStream *st; - char *device = NULL; - int ret; - enum AVCodecID codec_id = - s->audio_codec_id == AV_CODEC_ID_NONE ? 
DEFAULT_CODEC_ID : s->audio_codec_id; - const pa_sample_spec ss = { codec_id_to_pulse_format(codec_id), - pd->sample_rate, - pd->channels }; - - pa_buffer_attr attr = { -1 }; - - st = avformat_new_stream(s, NULL); - - if (!st) { - av_log(s, AV_LOG_ERROR, "Cannot add stream\n"); - return AVERROR(ENOMEM); - } - - attr.fragsize = pd->fragment_size; - - if (strcmp(s->filename, "default")) - device = s->filename; - - pd->s = pa_simple_new(pd->server, pd->name, - PA_STREAM_RECORD, - device, pd->stream_name, &ss, - NULL, &attr, &ret); - - if (!pd->s) { - av_log(s, AV_LOG_ERROR, "pa_simple_new failed: %s\n", - pa_strerror(ret)); - return AVERROR(EIO); - } - /* take real parameters */ - st->codec->codec_type = AVMEDIA_TYPE_AUDIO; - st->codec->codec_id = codec_id; - st->codec->sample_rate = pd->sample_rate; - st->codec->channels = pd->channels; - avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ - - pd->pts = AV_NOPTS_VALUE; - pd->frame_duration = (pd->frame_size * 1000000LL * 8) / - (pd->sample_rate * pd->channels * av_get_bits_per_sample(codec_id)); - - return 0; -} - -static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt) -{ - PulseData *pd = s->priv_data; - int res; - pa_usec_t latency; - - if (av_new_packet(pkt, pd->frame_size) < 0) { - return AVERROR(ENOMEM); - } - - if ((pa_simple_read(pd->s, pkt->data, pkt->size, &res)) < 0) { - av_log(s, AV_LOG_ERROR, "pa_simple_read failed: %s\n", - pa_strerror(res)); - av_free_packet(pkt); - return AVERROR(EIO); - } - - if ((latency = pa_simple_get_latency(pd->s, &res)) == (pa_usec_t) -1) { - av_log(s, AV_LOG_ERROR, "pa_simple_get_latency() failed: %s\n", - pa_strerror(res)); - return AVERROR(EIO); - } - - if (pd->pts == AV_NOPTS_VALUE) { - pd->pts = -latency; - if (pd->wallclock) - pd->pts += av_gettime(); - } - - pkt->pts = pd->pts; - - pd->pts += pd->frame_duration; - - return 0; -} - -static av_cold int pulse_close(AVFormatContext *s) -{ - PulseData *pd = s->priv_data; - pa_simple_free(pd->s); - return 
0; -} - -#define OFFSET(a) offsetof(PulseData, a) -#define D AV_OPT_FLAG_DECODING_PARAM - -static const AVOption options[] = { - { "server", "pulse server name", OFFSET(server), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, D }, - { "name", "application name", OFFSET(name), AV_OPT_TYPE_STRING, {.str = "libav"}, 0, 0, D }, - { "stream_name", "stream description", OFFSET(stream_name), AV_OPT_TYPE_STRING, {.str = "record"}, 0, 0, D }, - { "sample_rate", "sample rate in Hz", OFFSET(sample_rate), AV_OPT_TYPE_INT, {.i64 = 48000}, 1, INT_MAX, D }, - { "channels", "number of audio channels", OFFSET(channels), AV_OPT_TYPE_INT, {.i64 = 2}, 1, INT_MAX, D }, - { "frame_size", "number of bytes per frame", OFFSET(frame_size), AV_OPT_TYPE_INT, {.i64 = 1024}, 1, INT_MAX, D }, - { "fragment_size", "buffering size, affects latency and cpu usage", OFFSET(fragment_size), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, D }, - { "wallclock", "set the initial pts using the current time", OFFSET(wallclock), AV_OPT_TYPE_INT, {.i64 = 1}, -1, 1, D }, - { NULL }, -}; - -static const AVClass pulse_demuxer_class = { - .class_name = "Pulse demuxer", - .item_name = av_default_item_name, - .option = options, - .version = LIBAVUTIL_VERSION_INT, -}; - -AVInputFormat ff_pulse_demuxer = { - .name = "pulse", - .long_name = NULL_IF_CONFIG_SMALL("Pulse audio input"), - .priv_data_size = sizeof(PulseData), - .read_header = pulse_read_header, - .read_packet = pulse_read_packet, - .read_close = pulse_close, - .flags = AVFMT_NOFILE, - .priv_class = &pulse_demuxer_class, -}; diff --git a/libavdevice/pulse_audio_common.c b/libavdevice/pulse_audio_common.c new file mode 100644 index 0000000..5a2568b --- /dev/null +++ b/libavdevice/pulse_audio_common.c @@ -0,0 +1,249 @@ +/* + * Pulseaudio common + * Copyright (c) 2014 Lukasz Marek + * Copyright (c) 2011 Luca Barbato <lu_zero@gentoo.org> + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "pulse_audio_common.h" +#include "libavutil/attributes.h" +#include "libavutil/avstring.h" +#include "libavutil/mem.h" +#include "libavutil/avassert.h" + +pa_sample_format_t av_cold ff_codec_id_to_pulse_format(enum AVCodecID codec_id) +{ + switch (codec_id) { + case AV_CODEC_ID_PCM_U8: return PA_SAMPLE_U8; + case AV_CODEC_ID_PCM_ALAW: return PA_SAMPLE_ALAW; + case AV_CODEC_ID_PCM_MULAW: return PA_SAMPLE_ULAW; + case AV_CODEC_ID_PCM_S16LE: return PA_SAMPLE_S16LE; + case AV_CODEC_ID_PCM_S16BE: return PA_SAMPLE_S16BE; + case AV_CODEC_ID_PCM_F32LE: return PA_SAMPLE_FLOAT32LE; + case AV_CODEC_ID_PCM_F32BE: return PA_SAMPLE_FLOAT32BE; + case AV_CODEC_ID_PCM_S32LE: return PA_SAMPLE_S32LE; + case AV_CODEC_ID_PCM_S32BE: return PA_SAMPLE_S32BE; + case AV_CODEC_ID_PCM_S24LE: return PA_SAMPLE_S24LE; + case AV_CODEC_ID_PCM_S24BE: return PA_SAMPLE_S24BE; + default: return PA_SAMPLE_INVALID; + } +} + +enum PulseAudioContextState { + PULSE_CONTEXT_INITIALIZING, + PULSE_CONTEXT_READY, + PULSE_CONTEXT_FINISHED +}; + +typedef struct PulseAudioDeviceList { + AVDeviceInfoList *devices; + int error_code; + int output; + char *default_device; +} PulseAudioDeviceList; + +static void pa_state_cb(pa_context *c, void *userdata) +{ + enum 
PulseAudioContextState *context_state = userdata; + + switch (pa_context_get_state(c)) { + case PA_CONTEXT_FAILED: + case PA_CONTEXT_TERMINATED: + *context_state = PULSE_CONTEXT_FINISHED; + break; + case PA_CONTEXT_READY: + *context_state = PULSE_CONTEXT_READY; + break; + default: + break; + } +} + +void ff_pulse_audio_disconnect_context(pa_mainloop **pa_ml, pa_context **pa_ctx) +{ + av_assert0(pa_ml); + av_assert0(pa_ctx); + + if (*pa_ctx) { + pa_context_set_state_callback(*pa_ctx, NULL, NULL); + pa_context_disconnect(*pa_ctx); + pa_context_unref(*pa_ctx); + } + if (*pa_ml) + pa_mainloop_free(*pa_ml); + *pa_ml = NULL; + *pa_ctx = NULL; +} + +int ff_pulse_audio_connect_context(pa_mainloop **pa_ml, pa_context **pa_ctx, + const char *server, const char *description) +{ + int ret; + pa_mainloop_api *pa_mlapi = NULL; + enum PulseAudioContextState context_state = PULSE_CONTEXT_INITIALIZING; + + av_assert0(pa_ml); + av_assert0(pa_ctx); + + *pa_ml = NULL; + *pa_ctx = NULL; + + if (!(*pa_ml = pa_mainloop_new())) + return AVERROR(ENOMEM); + if (!(pa_mlapi = pa_mainloop_get_api(*pa_ml))) { + ret = AVERROR_EXTERNAL; + goto fail; + } + if (!(*pa_ctx = pa_context_new(pa_mlapi, description))) { + ret = AVERROR(ENOMEM); + goto fail; + } + pa_context_set_state_callback(*pa_ctx, pa_state_cb, &context_state); + if (pa_context_connect(*pa_ctx, server, 0, NULL) < 0) { + ret = AVERROR_EXTERNAL; + goto fail; + } + + while (context_state == PULSE_CONTEXT_INITIALIZING) + pa_mainloop_iterate(*pa_ml, 1, NULL); + if (context_state == PULSE_CONTEXT_FINISHED) { + ret = AVERROR_EXTERNAL; + goto fail; + } + return 0; + + fail: + ff_pulse_audio_disconnect_context(pa_ml, pa_ctx); + return ret; +} + +static void pulse_add_detected_device(PulseAudioDeviceList *info, + const char *name, const char *description) +{ + int ret; + AVDeviceInfo *new_device = NULL; + + if (info->error_code) + return; + + new_device = av_mallocz(sizeof(AVDeviceInfo)); + if (!new_device) { + info->error_code = 
AVERROR(ENOMEM); + return; + } + + new_device->device_description = av_strdup(description); + new_device->device_name = av_strdup(name); + + if (!new_device->device_description || !new_device->device_name) { + info->error_code = AVERROR(ENOMEM); + goto fail; + } + + if ((ret = av_dynarray_add_nofree(&info->devices->devices, + &info->devices->nb_devices, new_device)) < 0) { + info->error_code = ret; + goto fail; + } + return; + + fail: + av_free(new_device->device_description); + av_free(new_device->device_name); + av_free(new_device); + +} + +static void pulse_audio_source_device_cb(pa_context *c, const pa_source_info *dev, + int eol, void *userdata) +{ + if (!eol) + pulse_add_detected_device(userdata, dev->name, dev->description); +} + +static void pulse_audio_sink_device_cb(pa_context *c, const pa_sink_info *dev, + int eol, void *userdata) +{ + if (!eol) + pulse_add_detected_device(userdata, dev->name, dev->description); +} + +static void pulse_server_info_cb(pa_context *c, const pa_server_info *i, void *userdata) +{ + PulseAudioDeviceList *info = userdata; + if (info->output) + info->default_device = av_strdup(i->default_sink_name); + else + info->default_device = av_strdup(i->default_source_name); + if (!info->default_device) + info->error_code = AVERROR(ENOMEM); +} + +int ff_pulse_audio_get_devices(AVDeviceInfoList *devices, const char *server, int output) +{ + pa_mainloop *pa_ml = NULL; + pa_operation *pa_op = NULL; + pa_context *pa_ctx = NULL; + enum pa_operation_state op_state; + PulseAudioDeviceList dev_list = { 0 }; + int i; + + dev_list.output = output; + dev_list.devices = devices; + if (!devices) + return AVERROR(EINVAL); + devices->nb_devices = 0; + devices->devices = NULL; + + if ((dev_list.error_code = ff_pulse_audio_connect_context(&pa_ml, &pa_ctx, server, "Query devices")) < 0) + goto fail; + + if (output) + pa_op = pa_context_get_sink_info_list(pa_ctx, pulse_audio_sink_device_cb, &dev_list); + else + pa_op = 
pa_context_get_source_info_list(pa_ctx, pulse_audio_source_device_cb, &dev_list); + while ((op_state = pa_operation_get_state(pa_op)) == PA_OPERATION_RUNNING) + pa_mainloop_iterate(pa_ml, 1, NULL); + if (op_state != PA_OPERATION_DONE) + dev_list.error_code = AVERROR_EXTERNAL; + pa_operation_unref(pa_op); + if (dev_list.error_code < 0) + goto fail; + + pa_op = pa_context_get_server_info(pa_ctx, pulse_server_info_cb, &dev_list); + while ((op_state = pa_operation_get_state(pa_op)) == PA_OPERATION_RUNNING) + pa_mainloop_iterate(pa_ml, 1, NULL); + if (op_state != PA_OPERATION_DONE) + dev_list.error_code = AVERROR_EXTERNAL; + pa_operation_unref(pa_op); + if (dev_list.error_code < 0) + goto fail; + + devices->default_device = -1; + for (i = 0; i < devices->nb_devices; i++) { + if (!strcmp(devices->devices[i]->device_name, dev_list.default_device)) { + devices->default_device = i; + break; + } + } + + fail: + av_free(dev_list.default_device); + ff_pulse_audio_disconnect_context(&pa_ml, &pa_ctx); + return dev_list.error_code; +} diff --git a/libavdevice/pulse_audio_common.h b/libavdevice/pulse_audio_common.h new file mode 100644 index 0000000..02534f7 --- /dev/null +++ b/libavdevice/pulse_audio_common.h @@ -0,0 +1,38 @@ +/* + * Pulseaudio input + * Copyright (c) 2011 Luca Barbato <lu_zero@gentoo.org> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVDEVICE_PULSE_AUDIO_COMMON_H +#define AVDEVICE_PULSE_AUDIO_COMMON_H + +#include <pulse/pulseaudio.h> +#include "libavcodec/avcodec.h" +#include "avdevice.h" + +pa_sample_format_t ff_codec_id_to_pulse_format(enum AVCodecID codec_id); + +int ff_pulse_audio_get_devices(AVDeviceInfoList *devices, const char *server, int output); + +int ff_pulse_audio_connect_context(pa_mainloop **pa_ml, pa_context **pa_ctx, + const char *server, const char *description); + +void ff_pulse_audio_disconnect_context(pa_mainloop **pa_ml, pa_context **pa_ctx); + +#endif /* AVDEVICE_PULSE_AUDIO_COMMON_H */ diff --git a/libavdevice/pulse_audio_dec.c b/libavdevice/pulse_audio_dec.c new file mode 100644 index 0000000..316cc4a --- /dev/null +++ b/libavdevice/pulse_audio_dec.c @@ -0,0 +1,373 @@ +/* + * Pulseaudio input + * Copyright (c) 2011 Luca Barbato <lu_zero@gentoo.org> + * Copyright 2004-2006 Lennart Poettering + * Copyright (c) 2014 Michael Niedermayer <michaelni@gmx.at> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <pulse/rtclock.h> +#include <pulse/error.h> +#include "libavformat/avformat.h" +#include "libavformat/internal.h" +#include "libavutil/opt.h" +#include "libavutil/time.h" +#include "pulse_audio_common.h" +#include "timefilter.h" + +#define DEFAULT_CODEC_ID AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE) + +typedef struct PulseData { + AVClass *class; + char *server; + char *name; + char *stream_name; + int sample_rate; + int channels; + int frame_size; + int fragment_size; + + pa_threaded_mainloop *mainloop; + pa_context *context; + pa_stream *stream; + + TimeFilter *timefilter; + int last_period; + int wallclock; +} PulseData; + + +#define CHECK_SUCCESS_GOTO(rerror, expression, label) \ + do { \ + if (!(expression)) { \ + rerror = AVERROR_EXTERNAL; \ + goto label; \ + } \ + } while(0); + +#define CHECK_DEAD_GOTO(p, rerror, label) \ + do { \ + if (!(p)->context || !PA_CONTEXT_IS_GOOD(pa_context_get_state((p)->context)) || \ + !(p)->stream || !PA_STREAM_IS_GOOD(pa_stream_get_state((p)->stream))) { \ + rerror = AVERROR_EXTERNAL; \ + goto label; \ + } \ + } while(0); + +static void context_state_cb(pa_context *c, void *userdata) { + PulseData *p = userdata; + + switch (pa_context_get_state(c)) { + case PA_CONTEXT_READY: + case PA_CONTEXT_TERMINATED: + case PA_CONTEXT_FAILED: + pa_threaded_mainloop_signal(p->mainloop, 0); + break; + } +} + +static void stream_state_cb(pa_stream *s, void * userdata) { + PulseData *p = userdata; + + switch (pa_stream_get_state(s)) { + case PA_STREAM_READY: + case PA_STREAM_FAILED: + case PA_STREAM_TERMINATED: + pa_threaded_mainloop_signal(p->mainloop, 0); + break; + } +} + +static void stream_request_cb(pa_stream *s, size_t length, void *userdata) { + PulseData *p = userdata; + + 
pa_threaded_mainloop_signal(p->mainloop, 0); +} + +static void stream_latency_update_cb(pa_stream *s, void *userdata) { + PulseData *p = userdata; + + pa_threaded_mainloop_signal(p->mainloop, 0); +} + +static av_cold int pulse_close(AVFormatContext *s) +{ + PulseData *pd = s->priv_data; + + if (pd->mainloop) + pa_threaded_mainloop_stop(pd->mainloop); + + if (pd->stream) + pa_stream_unref(pd->stream); + pd->stream = NULL; + + if (pd->context) { + pa_context_disconnect(pd->context); + pa_context_unref(pd->context); + } + pd->context = NULL; + + if (pd->mainloop) + pa_threaded_mainloop_free(pd->mainloop); + pd->mainloop = NULL; + + ff_timefilter_destroy(pd->timefilter); + pd->timefilter = NULL; + + return 0; +} + +static av_cold int pulse_read_header(AVFormatContext *s) +{ + PulseData *pd = s->priv_data; + AVStream *st; + char *device = NULL; + int ret; + enum AVCodecID codec_id = + s->audio_codec_id == AV_CODEC_ID_NONE ? DEFAULT_CODEC_ID : s->audio_codec_id; + const pa_sample_spec ss = { ff_codec_id_to_pulse_format(codec_id), + pd->sample_rate, + pd->channels }; + + pa_buffer_attr attr = { -1 }; + + st = avformat_new_stream(s, NULL); + + if (!st) { + av_log(s, AV_LOG_ERROR, "Cannot add stream\n"); + return AVERROR(ENOMEM); + } + + attr.fragsize = pd->fragment_size; + + if (s->filename[0] != '\0' && strcmp(s->filename, "default")) + device = s->filename; + + if (!(pd->mainloop = pa_threaded_mainloop_new())) { + pulse_close(s); + return AVERROR_EXTERNAL; + } + + if (!(pd->context = pa_context_new(pa_threaded_mainloop_get_api(pd->mainloop), pd->name))) { + pulse_close(s); + return AVERROR_EXTERNAL; + } + + pa_context_set_state_callback(pd->context, context_state_cb, pd); + + if (pa_context_connect(pd->context, pd->server, 0, NULL) < 0) { + pulse_close(s); + return AVERROR(pa_context_errno(pd->context)); + } + + pa_threaded_mainloop_lock(pd->mainloop); + + if (pa_threaded_mainloop_start(pd->mainloop) < 0) { + ret = -1; + goto unlock_and_fail; + } + + for (;;) { + 
pa_context_state_t state; + + state = pa_context_get_state(pd->context); + + if (state == PA_CONTEXT_READY) + break; + + if (!PA_CONTEXT_IS_GOOD(state)) { + ret = AVERROR(pa_context_errno(pd->context)); + goto unlock_and_fail; + } + + /* Wait until the context is ready */ + pa_threaded_mainloop_wait(pd->mainloop); + } + + if (!(pd->stream = pa_stream_new(pd->context, pd->stream_name, &ss, NULL))) { + ret = AVERROR(pa_context_errno(pd->context)); + goto unlock_and_fail; + } + + pa_stream_set_state_callback(pd->stream, stream_state_cb, pd); + pa_stream_set_read_callback(pd->stream, stream_request_cb, pd); + pa_stream_set_write_callback(pd->stream, stream_request_cb, pd); + pa_stream_set_latency_update_callback(pd->stream, stream_latency_update_cb, pd); + + ret = pa_stream_connect_record(pd->stream, device, &attr, + PA_STREAM_INTERPOLATE_TIMING + |PA_STREAM_ADJUST_LATENCY + |PA_STREAM_AUTO_TIMING_UPDATE); + + if (ret < 0) { + ret = AVERROR(pa_context_errno(pd->context)); + goto unlock_and_fail; + } + + for (;;) { + pa_stream_state_t state; + + state = pa_stream_get_state(pd->stream); + + if (state == PA_STREAM_READY) + break; + + if (!PA_STREAM_IS_GOOD(state)) { + ret = AVERROR(pa_context_errno(pd->context)); + goto unlock_and_fail; + } + + /* Wait until the stream is ready */ + pa_threaded_mainloop_wait(pd->mainloop); + } + + pa_threaded_mainloop_unlock(pd->mainloop); + + /* take real parameters */ + st->codec->codec_type = AVMEDIA_TYPE_AUDIO; + st->codec->codec_id = codec_id; + st->codec->sample_rate = pd->sample_rate; + st->codec->channels = pd->channels; + avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + + pd->timefilter = ff_timefilter_new(1000000.0 / pd->sample_rate, + 1000, 1.5E-6); + + if (!pd->timefilter) { + pulse_close(s); + return AVERROR(ENOMEM); + } + + return 0; + +unlock_and_fail: + pa_threaded_mainloop_unlock(pd->mainloop); + + pulse_close(s); + return ret; +} + +static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt) +{ + 
PulseData *pd = s->priv_data; + int ret; + size_t read_length; + const void *read_data = NULL; + int64_t dts; + pa_usec_t latency; + int negative; + + pa_threaded_mainloop_lock(pd->mainloop); + + CHECK_DEAD_GOTO(pd, ret, unlock_and_fail); + + while (!read_data) { + int r; + + r = pa_stream_peek(pd->stream, &read_data, &read_length); + CHECK_SUCCESS_GOTO(ret, r == 0, unlock_and_fail); + + if (read_length <= 0) { + pa_threaded_mainloop_wait(pd->mainloop); + CHECK_DEAD_GOTO(pd, ret, unlock_and_fail); + } else if (!read_data) { + /* There's a hole in the stream, skip it. We could generate + * silence, but that wouldn't work for compressed streams. */ + r = pa_stream_drop(pd->stream); + CHECK_SUCCESS_GOTO(ret, r == 0, unlock_and_fail); + } + } + + if (av_new_packet(pkt, read_length) < 0) { + ret = AVERROR(ENOMEM); + goto unlock_and_fail; + } + + dts = av_gettime(); + pa_operation_unref(pa_stream_update_timing_info(pd->stream, NULL, NULL)); + + if (pa_stream_get_latency(pd->stream, &latency, &negative) >= 0) { + enum AVCodecID codec_id = + s->audio_codec_id == AV_CODEC_ID_NONE ? 
DEFAULT_CODEC_ID : s->audio_codec_id; + int frame_size = ((av_get_bits_per_sample(codec_id) >> 3) * pd->channels); + int frame_duration = read_length / frame_size; + + + if (negative) { + dts += latency; + } else + dts -= latency; + if (pd->wallclock) + pkt->pts = ff_timefilter_update(pd->timefilter, dts, pd->last_period); + + pd->last_period = frame_duration; + } else { + av_log(s, AV_LOG_WARNING, "pa_stream_get_latency() failed\n"); + } + + memcpy(pkt->data, read_data, read_length); + pa_stream_drop(pd->stream); + + pa_threaded_mainloop_unlock(pd->mainloop); + return 0; + +unlock_and_fail: + pa_threaded_mainloop_unlock(pd->mainloop); + return ret; +} + +static int pulse_get_device_list(AVFormatContext *h, AVDeviceInfoList *device_list) +{ + PulseData *s = h->priv_data; + return ff_pulse_audio_get_devices(device_list, s->server, 0); +} + +#define OFFSET(a) offsetof(PulseData, a) +#define D AV_OPT_FLAG_DECODING_PARAM + +static const AVOption options[] = { + { "server", "set PulseAudio server", OFFSET(server), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, D }, + { "name", "set application name", OFFSET(name), AV_OPT_TYPE_STRING, {.str = LIBAVFORMAT_IDENT}, 0, 0, D }, + { "stream_name", "set stream description", OFFSET(stream_name), AV_OPT_TYPE_STRING, {.str = "record"}, 0, 0, D }, + { "sample_rate", "set sample rate in Hz", OFFSET(sample_rate), AV_OPT_TYPE_INT, {.i64 = 48000}, 1, INT_MAX, D }, + { "channels", "set number of audio channels", OFFSET(channels), AV_OPT_TYPE_INT, {.i64 = 2}, 1, INT_MAX, D }, + { "frame_size", "set number of bytes per frame", OFFSET(frame_size), AV_OPT_TYPE_INT, {.i64 = 1024}, 1, INT_MAX, D }, + { "fragment_size", "set buffering size, affects latency and cpu usage", OFFSET(fragment_size), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, D }, + { "wallclock", "set the initial pts using the current time", OFFSET(wallclock), AV_OPT_TYPE_INT, {.i64 = 1}, -1, 1, D }, + { NULL }, +}; + +static const AVClass pulse_demuxer_class = { + .class_name = "Pulse 
demuxer", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, +}; + +AVInputFormat ff_pulse_demuxer = { + .name = "pulse", + .long_name = NULL_IF_CONFIG_SMALL("Pulse audio input"), + .priv_data_size = sizeof(PulseData), + .read_header = pulse_read_header, + .read_packet = pulse_read_packet, + .read_close = pulse_close, + .get_device_list = pulse_get_device_list, + .flags = AVFMT_NOFILE, + .priv_class = &pulse_demuxer_class, +}; diff --git a/libavdevice/pulse_audio_enc.c b/libavdevice/pulse_audio_enc.c new file mode 100644 index 0000000..bc4d1f0 --- /dev/null +++ b/libavdevice/pulse_audio_enc.c @@ -0,0 +1,796 @@ +/* + * Copyright (c) 2013 Lukasz Marek <lukasz.m.luki@gmail.com> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include <math.h> +#include <pulse/pulseaudio.h> +#include <pulse/error.h> +#include "libavformat/avformat.h" +#include "libavformat/internal.h" +#include "libavutil/opt.h" +#include "libavutil/time.h" +#include "libavutil/log.h" +#include "libavutil/attributes.h" +#include "pulse_audio_common.h" + +typedef struct PulseData { + AVClass *class; + const char *server; + const char *name; + const char *stream_name; + const char *device; + int64_t timestamp; + int buffer_size; /**< Buffer size in bytes */ + int buffer_duration; /**< Buffer size in ms, recalculated to buffer_size */ + int prebuf; + int minreq; + int last_result; + pa_threaded_mainloop *mainloop; + pa_context *ctx; + pa_stream *stream; + int nonblocking; + int mute; + pa_volume_t base_volume; + pa_volume_t last_volume; +} PulseData; + +static void pulse_audio_sink_device_cb(pa_context *ctx, const pa_sink_info *dev, + int eol, void *userdata) +{ + PulseData *s = userdata; + + if (s->ctx != ctx) + return; + + if (eol) { + pa_threaded_mainloop_signal(s->mainloop, 0); + } else { + if (dev->flags & PA_SINK_FLAT_VOLUME) + s->base_volume = dev->base_volume; + else + s->base_volume = PA_VOLUME_NORM; + av_log(s, AV_LOG_DEBUG, "base volume: %u\n", s->base_volume); + } +} + +/* Mainloop must be locked before calling this function as it uses pa_threaded_mainloop_wait. 
*/ +static int pulse_update_sink_info(AVFormatContext *h) +{ + PulseData *s = h->priv_data; + pa_operation *op; + if (!(op = pa_context_get_sink_info_by_name(s->ctx, s->device, + pulse_audio_sink_device_cb, s))) { + av_log(s, AV_LOG_ERROR, "pa_context_get_sink_info_by_name failed.\n"); + return AVERROR_EXTERNAL; + } + while (pa_operation_get_state(op) == PA_OPERATION_RUNNING) + pa_threaded_mainloop_wait(s->mainloop); + pa_operation_unref(op); + return 0; +} + +static void pulse_audio_sink_input_cb(pa_context *ctx, const pa_sink_input_info *i, + int eol, void *userdata) +{ + AVFormatContext *h = userdata; + PulseData *s = h->priv_data; + + if (s->ctx != ctx) + return; + + if (!eol) { + double val; + pa_volume_t vol = pa_cvolume_avg(&i->volume); + if (s->mute < 0 || (s->mute && !i->mute) || (!s->mute && i->mute)) { + s->mute = i->mute; + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_MUTE_STATE_CHANGED, &s->mute, sizeof(s->mute)); + } + + vol = pa_sw_volume_divide(vol, s->base_volume); + if (s->last_volume != vol) { + val = (double)vol / PA_VOLUME_NORM; + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED, &val, sizeof(val)); + s->last_volume = vol; + } + } +} + +/* This function creates new loop so may be called from PA callbacks. + Mainloop must be locked before calling this function as it operates on streams. 
*/ +static int pulse_update_sink_input_info(AVFormatContext *h) +{ + PulseData *s = h->priv_data; + pa_operation *op; + enum pa_operation_state op_state; + pa_mainloop *ml = NULL; + pa_context *ctx = NULL; + int ret = 0; + + if ((ret = ff_pulse_audio_connect_context(&ml, &ctx, s->server, "Update sink input information")) < 0) + return ret; + + if (!(op = pa_context_get_sink_input_info(ctx, pa_stream_get_index(s->stream), + pulse_audio_sink_input_cb, h))) { + ret = AVERROR_EXTERNAL; + goto fail; + } + + while ((op_state = pa_operation_get_state(op)) == PA_OPERATION_RUNNING) + pa_mainloop_iterate(ml, 1, NULL); + pa_operation_unref(op); + if (op_state != PA_OPERATION_DONE) { + ret = AVERROR_EXTERNAL; + goto fail; + } + + fail: + ff_pulse_audio_disconnect_context(&ml, &ctx); + if (ret) + av_log(s, AV_LOG_ERROR, "pa_context_get_sink_input_info failed.\n"); + return ret; +} + +static void pulse_event(pa_context *ctx, pa_subscription_event_type_t t, + uint32_t idx, void *userdata) +{ + AVFormatContext *h = userdata; + PulseData *s = h->priv_data; + + if (s->ctx != ctx) + return; + + if ((t & PA_SUBSCRIPTION_EVENT_FACILITY_MASK) == PA_SUBSCRIPTION_EVENT_SINK_INPUT) { + if ((t & PA_SUBSCRIPTION_EVENT_TYPE_MASK) == PA_SUBSCRIPTION_EVENT_CHANGE) + // Calling from mainloop callback. No need to lock mainloop. 
+ pulse_update_sink_input_info(h); + } +} + +static void pulse_stream_writable(pa_stream *stream, size_t nbytes, void *userdata) +{ + AVFormatContext *h = userdata; + PulseData *s = h->priv_data; + int64_t val = nbytes; + + if (stream != s->stream) + return; + + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_BUFFER_WRITABLE, &val, sizeof(val)); + pa_threaded_mainloop_signal(s->mainloop, 0); +} + +static void pulse_overflow(pa_stream *stream, void *userdata) +{ + AVFormatContext *h = userdata; + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_BUFFER_OVERFLOW, NULL, 0); +} + +static void pulse_underflow(pa_stream *stream, void *userdata) +{ + AVFormatContext *h = userdata; + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_BUFFER_UNDERFLOW, NULL, 0); +} + +static void pulse_stream_state(pa_stream *stream, void *userdata) +{ + PulseData *s = userdata; + + if (stream != s->stream) + return; + + switch (pa_stream_get_state(s->stream)) { + case PA_STREAM_READY: + case PA_STREAM_FAILED: + case PA_STREAM_TERMINATED: + pa_threaded_mainloop_signal(s->mainloop, 0); + default: + break; + } +} + +static int pulse_stream_wait(PulseData *s) +{ + pa_stream_state_t state; + + while ((state = pa_stream_get_state(s->stream)) != PA_STREAM_READY) { + if (state == PA_STREAM_FAILED || state == PA_STREAM_TERMINATED) + return AVERROR_EXTERNAL; + pa_threaded_mainloop_wait(s->mainloop); + } + return 0; +} + +static void pulse_context_state(pa_context *ctx, void *userdata) +{ + PulseData *s = userdata; + + if (s->ctx != ctx) + return; + + switch (pa_context_get_state(ctx)) { + case PA_CONTEXT_READY: + case PA_CONTEXT_FAILED: + case PA_CONTEXT_TERMINATED: + pa_threaded_mainloop_signal(s->mainloop, 0); + default: + break; + } +} + +static int pulse_context_wait(PulseData *s) +{ + pa_context_state_t state; + + while ((state = pa_context_get_state(s->ctx)) != PA_CONTEXT_READY) { + if (state == PA_CONTEXT_FAILED || state == PA_CONTEXT_TERMINATED) + return AVERROR_EXTERNAL; + 
pa_threaded_mainloop_wait(s->mainloop); + } + return 0; +} + +static void pulse_stream_result(pa_stream *stream, int success, void *userdata) +{ + PulseData *s = userdata; + + if (stream != s->stream) + return; + + s->last_result = success ? 0 : AVERROR_EXTERNAL; + pa_threaded_mainloop_signal(s->mainloop, 0); +} + +static int pulse_finish_stream_operation(PulseData *s, pa_operation *op, const char *name) +{ + if (!op) { + pa_threaded_mainloop_unlock(s->mainloop); + av_log(s, AV_LOG_ERROR, "%s failed.\n", name); + return AVERROR_EXTERNAL; + } + s->last_result = 2; + while (s->last_result == 2) + pa_threaded_mainloop_wait(s->mainloop); + pa_operation_unref(op); + pa_threaded_mainloop_unlock(s->mainloop); + if (s->last_result != 0) + av_log(s, AV_LOG_ERROR, "%s failed.\n", name); + return s->last_result; +} + +static int pulse_set_pause(PulseData *s, int pause) +{ + pa_operation *op; + pa_threaded_mainloop_lock(s->mainloop); + op = pa_stream_cork(s->stream, pause, pulse_stream_result, s); + return pulse_finish_stream_operation(s, op, "pa_stream_cork"); +} + +static int pulse_flash_stream(PulseData *s) +{ + pa_operation *op; + pa_threaded_mainloop_lock(s->mainloop); + op = pa_stream_flush(s->stream, pulse_stream_result, s); + return pulse_finish_stream_operation(s, op, "pa_stream_flush"); +} + +static void pulse_context_result(pa_context *ctx, int success, void *userdata) +{ + PulseData *s = userdata; + + if (s->ctx != ctx) + return; + + s->last_result = success ? 
0 : AVERROR_EXTERNAL; + pa_threaded_mainloop_signal(s->mainloop, 0); +} + +static int pulse_finish_context_operation(PulseData *s, pa_operation *op, const char *name) +{ + if (!op) { + pa_threaded_mainloop_unlock(s->mainloop); + av_log(s, AV_LOG_ERROR, "%s failed.\n", name); + return AVERROR_EXTERNAL; + } + s->last_result = 2; + while (s->last_result == 2) + pa_threaded_mainloop_wait(s->mainloop); + pa_operation_unref(op); + pa_threaded_mainloop_unlock(s->mainloop); + if (s->last_result != 0) + av_log(s, AV_LOG_ERROR, "%s failed.\n", name); + return s->last_result; +} + +static int pulse_set_mute(PulseData *s) +{ + pa_operation *op; + pa_threaded_mainloop_lock(s->mainloop); + op = pa_context_set_sink_input_mute(s->ctx, pa_stream_get_index(s->stream), + s->mute, pulse_context_result, s); + return pulse_finish_context_operation(s, op, "pa_context_set_sink_input_mute"); +} + +static int pulse_set_volume(PulseData *s, double volume) +{ + pa_operation *op; + pa_cvolume cvol; + pa_volume_t vol; + const pa_sample_spec *ss = pa_stream_get_sample_spec(s->stream); + + vol = pa_sw_volume_multiply(lround(volume * PA_VOLUME_NORM), s->base_volume); + pa_cvolume_set(&cvol, ss->channels, PA_VOLUME_NORM); + pa_sw_cvolume_multiply_scalar(&cvol, &cvol, vol); + pa_threaded_mainloop_lock(s->mainloop); + op = pa_context_set_sink_input_volume(s->ctx, pa_stream_get_index(s->stream), + &cvol, pulse_context_result, s); + return pulse_finish_context_operation(s, op, "pa_context_set_sink_input_volume"); +} + +static int pulse_subscribe_events(PulseData *s) +{ + pa_operation *op; + + pa_threaded_mainloop_lock(s->mainloop); + op = pa_context_subscribe(s->ctx, PA_SUBSCRIPTION_MASK_SINK_INPUT, pulse_context_result, s); + return pulse_finish_context_operation(s, op, "pa_context_subscribe"); +} + +static void pulse_map_channels_to_pulse(int64_t channel_layout, pa_channel_map *channel_map) +{ + channel_map->channels = 0; + if (channel_layout & AV_CH_FRONT_LEFT) + 
channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_LEFT; + if (channel_layout & AV_CH_FRONT_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_RIGHT; + if (channel_layout & AV_CH_FRONT_CENTER) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_CENTER; + if (channel_layout & AV_CH_LOW_FREQUENCY) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_LFE; + if (channel_layout & AV_CH_BACK_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_REAR_LEFT; + if (channel_layout & AV_CH_BACK_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_REAR_RIGHT; + if (channel_layout & AV_CH_FRONT_LEFT_OF_CENTER) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER; + if (channel_layout & AV_CH_FRONT_RIGHT_OF_CENTER) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER; + if (channel_layout & AV_CH_BACK_CENTER) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_REAR_CENTER; + if (channel_layout & AV_CH_SIDE_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_SIDE_LEFT; + if (channel_layout & AV_CH_SIDE_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_SIDE_RIGHT; + if (channel_layout & AV_CH_TOP_CENTER) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_CENTER; + if (channel_layout & AV_CH_TOP_FRONT_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_FRONT_LEFT; + if (channel_layout & AV_CH_TOP_FRONT_CENTER) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_FRONT_CENTER; + if (channel_layout & AV_CH_TOP_FRONT_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_FRONT_RIGHT; + if (channel_layout & AV_CH_TOP_BACK_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_REAR_LEFT; + if (channel_layout & AV_CH_TOP_BACK_CENTER) + 
channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_REAR_CENTER; + if (channel_layout & AV_CH_TOP_BACK_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_TOP_REAR_RIGHT; + if (channel_layout & AV_CH_STEREO_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_LEFT; + if (channel_layout & AV_CH_STEREO_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_FRONT_RIGHT; + if (channel_layout & AV_CH_WIDE_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_AUX0; + if (channel_layout & AV_CH_WIDE_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_AUX1; + if (channel_layout & AV_CH_SURROUND_DIRECT_LEFT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_AUX2; + if (channel_layout & AV_CH_SURROUND_DIRECT_RIGHT) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_AUX3; + if (channel_layout & AV_CH_LOW_FREQUENCY_2) + channel_map->map[channel_map->channels++] = PA_CHANNEL_POSITION_LFE; +} + +static av_cold int pulse_write_trailer(AVFormatContext *h) +{ + PulseData *s = h->priv_data; + + if (s->mainloop) { + pa_threaded_mainloop_lock(s->mainloop); + if (s->stream) { + pa_stream_disconnect(s->stream); + pa_stream_set_state_callback(s->stream, NULL, NULL); + pa_stream_set_write_callback(s->stream, NULL, NULL); + pa_stream_set_overflow_callback(s->stream, NULL, NULL); + pa_stream_set_underflow_callback(s->stream, NULL, NULL); + pa_stream_unref(s->stream); + s->stream = NULL; + } + if (s->ctx) { + pa_context_disconnect(s->ctx); + pa_context_set_state_callback(s->ctx, NULL, NULL); + pa_context_set_subscribe_callback(s->ctx, NULL, NULL); + pa_context_unref(s->ctx); + s->ctx = NULL; + } + pa_threaded_mainloop_unlock(s->mainloop); + pa_threaded_mainloop_stop(s->mainloop); + pa_threaded_mainloop_free(s->mainloop); + s->mainloop = NULL; + } + + return 0; +} + +static av_cold int pulse_write_header(AVFormatContext *h) +{ + PulseData *s = 
h->priv_data; + AVStream *st = NULL; + int ret; + pa_sample_spec sample_spec; + pa_buffer_attr buffer_attributes = { -1, -1, -1, -1, -1 }; + pa_channel_map channel_map; + pa_mainloop_api *mainloop_api; + const char *stream_name = s->stream_name; + static const pa_stream_flags_t stream_flags = PA_STREAM_INTERPOLATE_TIMING | + PA_STREAM_AUTO_TIMING_UPDATE | + PA_STREAM_NOT_MONOTONIC; + + if (h->nb_streams != 1 || h->streams[0]->codec->codec_type != AVMEDIA_TYPE_AUDIO) { + av_log(s, AV_LOG_ERROR, "Only a single audio stream is supported.\n"); + return AVERROR(EINVAL); + } + st = h->streams[0]; + + if (!stream_name) { + if (h->filename[0]) + stream_name = h->filename; + else + stream_name = "Playback"; + } + s->nonblocking = (h->flags & AVFMT_FLAG_NONBLOCK); + + if (s->buffer_duration) { + int64_t bytes = s->buffer_duration; + bytes *= st->codec->channels * st->codec->sample_rate * + av_get_bytes_per_sample(st->codec->sample_fmt); + bytes /= 1000; + buffer_attributes.tlength = FFMAX(s->buffer_size, av_clip64(bytes, 0, UINT32_MAX - 1)); + av_log(s, AV_LOG_DEBUG, + "Buffer duration: %ums recalculated into %"PRId64" bytes buffer.\n", + s->buffer_duration, bytes); + av_log(s, AV_LOG_DEBUG, "Real buffer length is %u bytes\n", buffer_attributes.tlength); + } else if (s->buffer_size) + buffer_attributes.tlength = s->buffer_size; + if (s->prebuf) + buffer_attributes.prebuf = s->prebuf; + if (s->minreq) + buffer_attributes.minreq = s->minreq; + + sample_spec.format = ff_codec_id_to_pulse_format(st->codec->codec_id); + sample_spec.rate = st->codec->sample_rate; + sample_spec.channels = st->codec->channels; + if (!pa_sample_spec_valid(&sample_spec)) { + av_log(s, AV_LOG_ERROR, "Invalid sample spec.\n"); + return AVERROR(EINVAL); + } + + if (sample_spec.channels == 1) { + channel_map.channels = 1; + channel_map.map[0] = PA_CHANNEL_POSITION_MONO; + } else if (st->codec->channel_layout) { + if (av_get_channel_layout_nb_channels(st->codec->channel_layout) != st->codec->channels) + 
return AVERROR(EINVAL); + pulse_map_channels_to_pulse(st->codec->channel_layout, &channel_map); + /* Unknown channel is present in channel_layout, let PulseAudio use its default. */ + if (channel_map.channels != sample_spec.channels) { + av_log(s, AV_LOG_WARNING, "Unknown channel. Using defaul channel map.\n"); + channel_map.channels = 0; + } + } else + channel_map.channels = 0; + + if (!channel_map.channels) + av_log(s, AV_LOG_WARNING, "Using PulseAudio's default channel map.\n"); + else if (!pa_channel_map_valid(&channel_map)) { + av_log(s, AV_LOG_ERROR, "Invalid channel map.\n"); + return AVERROR(EINVAL); + } + + /* start main loop */ + s->mainloop = pa_threaded_mainloop_new(); + if (!s->mainloop) { + av_log(s, AV_LOG_ERROR, "Cannot create threaded mainloop.\n"); + return AVERROR(ENOMEM); + } + if ((ret = pa_threaded_mainloop_start(s->mainloop)) < 0) { + av_log(s, AV_LOG_ERROR, "Cannot start threaded mainloop: %s.\n", pa_strerror(ret)); + pa_threaded_mainloop_free(s->mainloop); + s->mainloop = NULL; + return AVERROR_EXTERNAL; + } + + pa_threaded_mainloop_lock(s->mainloop); + + mainloop_api = pa_threaded_mainloop_get_api(s->mainloop); + if (!mainloop_api) { + av_log(s, AV_LOG_ERROR, "Cannot get mainloop API.\n"); + ret = AVERROR_EXTERNAL; + goto fail; + } + + s->ctx = pa_context_new(mainloop_api, s->name); + if (!s->ctx) { + av_log(s, AV_LOG_ERROR, "Cannot create context.\n"); + ret = AVERROR(ENOMEM); + goto fail; + } + pa_context_set_state_callback(s->ctx, pulse_context_state, s); + pa_context_set_subscribe_callback(s->ctx, pulse_event, h); + + if ((ret = pa_context_connect(s->ctx, s->server, 0, NULL)) < 0) { + av_log(s, AV_LOG_ERROR, "Cannot connect context: %s.\n", pa_strerror(ret)); + ret = AVERROR_EXTERNAL; + goto fail; + } + + if ((ret = pulse_context_wait(s)) < 0) { + av_log(s, AV_LOG_ERROR, "Context failed.\n"); + goto fail; + } + + s->stream = pa_stream_new(s->ctx, stream_name, &sample_spec, + channel_map.channels ? 
&channel_map : NULL); + + if ((ret = pulse_update_sink_info(h)) < 0) { + av_log(s, AV_LOG_ERROR, "Updating sink info failed.\n"); + goto fail; + } + + if (!s->stream) { + av_log(s, AV_LOG_ERROR, "Cannot create stream.\n"); + ret = AVERROR(ENOMEM); + goto fail; + } + pa_stream_set_state_callback(s->stream, pulse_stream_state, s); + pa_stream_set_write_callback(s->stream, pulse_stream_writable, h); + pa_stream_set_overflow_callback(s->stream, pulse_overflow, h); + pa_stream_set_underflow_callback(s->stream, pulse_underflow, h); + + if ((ret = pa_stream_connect_playback(s->stream, s->device, &buffer_attributes, + stream_flags, NULL, NULL)) < 0) { + av_log(s, AV_LOG_ERROR, "pa_stream_connect_playback failed: %s.\n", pa_strerror(ret)); + ret = AVERROR_EXTERNAL; + goto fail; + } + + if ((ret = pulse_stream_wait(s)) < 0) { + av_log(s, AV_LOG_ERROR, "Stream failed.\n"); + goto fail; + } + + /* read back buffer attributes for future use */ + buffer_attributes = *pa_stream_get_buffer_attr(s->stream); + s->buffer_size = buffer_attributes.tlength; + s->prebuf = buffer_attributes.prebuf; + s->minreq = buffer_attributes.minreq; + av_log(s, AV_LOG_DEBUG, "Real buffer attributes: size: %d, prebuf: %d, minreq: %d\n", + s->buffer_size, s->prebuf, s->minreq); + + pa_threaded_mainloop_unlock(s->mainloop); + + if ((ret = pulse_subscribe_events(s)) < 0) { + av_log(s, AV_LOG_ERROR, "Event subscription failed.\n"); + /* a bit ugly but the simplest to lock here*/ + pa_threaded_mainloop_lock(s->mainloop); + goto fail; + } + + /* force control messages */ + s->mute = -1; + s->last_volume = PA_VOLUME_INVALID; + pa_threaded_mainloop_lock(s->mainloop); + if ((ret = pulse_update_sink_input_info(h)) < 0) { + av_log(s, AV_LOG_ERROR, "Updating sink input info failed.\n"); + goto fail; + } + pa_threaded_mainloop_unlock(s->mainloop); + + avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + + return 0; + fail: + pa_threaded_mainloop_unlock(s->mainloop); + pulse_write_trailer(h); + return 
ret; +} + +static int pulse_write_packet(AVFormatContext *h, AVPacket *pkt) +{ + PulseData *s = h->priv_data; + int ret; + int64_t writable_size; + + if (!pkt) + return pulse_flash_stream(s); + + if (pkt->dts != AV_NOPTS_VALUE) + s->timestamp = pkt->dts; + + if (pkt->duration) { + s->timestamp += pkt->duration; + } else { + AVStream *st = h->streams[0]; + AVCodecContext *codec_ctx = st->codec; + AVRational r = { 1, codec_ctx->sample_rate }; + int64_t samples = pkt->size / (av_get_bytes_per_sample(codec_ctx->sample_fmt) * codec_ctx->channels); + s->timestamp += av_rescale_q(samples, r, st->time_base); + } + + pa_threaded_mainloop_lock(s->mainloop); + if (!PA_STREAM_IS_GOOD(pa_stream_get_state(s->stream))) { + av_log(s, AV_LOG_ERROR, "PulseAudio stream is in invalid state.\n"); + goto fail; + } + while (pa_stream_writable_size(s->stream) < s->minreq) { + if (s->nonblocking) { + pa_threaded_mainloop_unlock(s->mainloop); + return AVERROR(EAGAIN); + } else + pa_threaded_mainloop_wait(s->mainloop); + } + + if ((ret = pa_stream_write(s->stream, pkt->data, pkt->size, NULL, 0, PA_SEEK_RELATIVE)) < 0) { + av_log(s, AV_LOG_ERROR, "pa_stream_write failed: %s\n", pa_strerror(ret)); + goto fail; + } + if ((writable_size = pa_stream_writable_size(s->stream)) >= s->minreq) + avdevice_dev_to_app_control_message(h, AV_DEV_TO_APP_BUFFER_WRITABLE, &writable_size, sizeof(writable_size)); + + pa_threaded_mainloop_unlock(s->mainloop); + + return 0; + fail: + pa_threaded_mainloop_unlock(s->mainloop); + return AVERROR_EXTERNAL; +} + +static int pulse_write_frame(AVFormatContext *h, int stream_index, + AVFrame **frame, unsigned flags) +{ + AVPacket pkt; + + /* Planar formats are not supported yet. */ + if (flags & AV_WRITE_UNCODED_FRAME_QUERY) + return av_sample_fmt_is_planar(h->streams[stream_index]->codec->sample_fmt) ? 
+ AVERROR(EINVAL) : 0; + + pkt.data = (*frame)->data[0]; + pkt.size = (*frame)->nb_samples * av_get_bytes_per_sample((*frame)->format) * av_frame_get_channels(*frame); + pkt.dts = (*frame)->pkt_dts; + pkt.duration = av_frame_get_pkt_duration(*frame); + return pulse_write_packet(h, &pkt); +} + + +static void pulse_get_output_timestamp(AVFormatContext *h, int stream, int64_t *dts, int64_t *wall) +{ + PulseData *s = h->priv_data; + pa_usec_t latency; + int neg; + pa_threaded_mainloop_lock(s->mainloop); + pa_stream_get_latency(s->stream, &latency, &neg); + pa_threaded_mainloop_unlock(s->mainloop); + if (wall) + *wall = av_gettime(); + if (dts) + *dts = s->timestamp - (neg ? -latency : latency); +} + +static int pulse_get_device_list(AVFormatContext *h, AVDeviceInfoList *device_list) +{ + PulseData *s = h->priv_data; + return ff_pulse_audio_get_devices(device_list, s->server, 1); +} + +static int pulse_control_message(AVFormatContext *h, int type, + void *data, size_t data_size) +{ + PulseData *s = h->priv_data; + int ret; + + switch(type) { + case AV_APP_TO_DEV_PAUSE: + return pulse_set_pause(s, 1); + case AV_APP_TO_DEV_PLAY: + return pulse_set_pause(s, 0); + case AV_APP_TO_DEV_TOGGLE_PAUSE: + return pulse_set_pause(s, !pa_stream_is_corked(s->stream)); + case AV_APP_TO_DEV_MUTE: + if (!s->mute) { + s->mute = 1; + return pulse_set_mute(s); + } + return 0; + case AV_APP_TO_DEV_UNMUTE: + if (s->mute) { + s->mute = 0; + return pulse_set_mute(s); + } + return 0; + case AV_APP_TO_DEV_TOGGLE_MUTE: + s->mute = !s->mute; + return pulse_set_mute(s); + case AV_APP_TO_DEV_SET_VOLUME: + return pulse_set_volume(s, *(double *)data); + case AV_APP_TO_DEV_GET_VOLUME: + s->last_volume = PA_VOLUME_INVALID; + pa_threaded_mainloop_lock(s->mainloop); + ret = pulse_update_sink_input_info(h); + pa_threaded_mainloop_unlock(s->mainloop); + return ret; + case AV_APP_TO_DEV_GET_MUTE: + s->mute = -1; + pa_threaded_mainloop_lock(s->mainloop); + ret = pulse_update_sink_input_info(h); + 
pa_threaded_mainloop_unlock(s->mainloop); + return ret; + default: + break; + } + return AVERROR(ENOSYS); +} + +#define OFFSET(a) offsetof(PulseData, a) +#define E AV_OPT_FLAG_ENCODING_PARAM +static const AVOption options[] = { + { "server", "set PulseAudio server", OFFSET(server), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, E }, + { "name", "set application name", OFFSET(name), AV_OPT_TYPE_STRING, {.str = LIBAVFORMAT_IDENT}, 0, 0, E }, + { "stream_name", "set stream description", OFFSET(stream_name), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, E }, + { "device", "set device name", OFFSET(device), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, E }, + { "buffer_size", "set buffer size in bytes", OFFSET(buffer_size), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, E }, + { "buffer_duration", "set buffer duration in millisecs", OFFSET(buffer_duration), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, E }, + { "prebuf", "set pre-buffering size", OFFSET(prebuf), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, E }, + { "minreq", "set minimum request size", OFFSET(minreq), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, E }, + { NULL } +}; + +static const AVClass pulse_muxer_class = { + .class_name = "PulseAudio muxer", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT, +}; + +AVOutputFormat ff_pulse_muxer = { + .name = "pulse", + .long_name = NULL_IF_CONFIG_SMALL("Pulse audio output"), + .priv_data_size = sizeof(PulseData), + .audio_codec = AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE), + .video_codec = AV_CODEC_ID_NONE, + .write_header = pulse_write_header, + .write_packet = pulse_write_packet, + .write_uncoded_frame = pulse_write_frame, + .write_trailer = pulse_write_trailer, + .get_output_timestamp = pulse_get_output_timestamp, + .get_device_list = pulse_get_device_list, + .control_message = pulse_control_message, + .flags = AVFMT_NOFILE | AVFMT_ALLOW_FLUSH, + .priv_class = &pulse_muxer_class, +}; diff --git 
a/libavdevice/qtkit.m b/libavdevice/qtkit.m new file mode 100644 index 0000000..aa8fa16 --- /dev/null +++ b/libavdevice/qtkit.m @@ -0,0 +1,358 @@ +/* + * QTKit input device + * Copyright (c) 2013 Vadim Kalinsky <vadim@kalinsky.ru> + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * QTKit input device + * @author Vadim Kalinsky <vadim@kalinsky.ru> + */ + +#import <QTKit/QTKit.h> +#include <pthread.h> + +#include "libavutil/pixdesc.h" +#include "libavutil/opt.h" +#include "libavformat/internal.h" +#include "libavutil/internal.h" +#include "libavutil/time.h" +#include "avdevice.h" + +#define QTKIT_TIMEBASE 100 + +static const AVRational kQTKitTimeBase_q = { + .num = 1, + .den = QTKIT_TIMEBASE +}; + +typedef struct +{ + AVClass* class; + + float frame_rate; + int frames_captured; + int64_t first_pts; + pthread_mutex_t frame_lock; + pthread_cond_t frame_wait_cond; + id qt_delegate; + + int list_devices; + int video_device_index; + + QTCaptureSession* capture_session; + QTCaptureDecompressedVideoOutput* video_output; + CVImageBufferRef current_frame; +} CaptureContext; + +static void lock_frames(CaptureContext* ctx) +{ + pthread_mutex_lock(&ctx->frame_lock); +} + +static void unlock_frames(CaptureContext* ctx) +{ + 
pthread_mutex_unlock(&ctx->frame_lock); +} + +/** FrameReciever class - delegate for QTCaptureSession + */ +@interface FFMPEG_FrameReceiver : NSObject +{ + CaptureContext* _context; +} + +- (id)initWithContext:(CaptureContext*)context; + +- (void)captureOutput:(QTCaptureOutput *)captureOutput + didOutputVideoFrame:(CVImageBufferRef)videoFrame + withSampleBuffer:(QTSampleBuffer *)sampleBuffer + fromConnection:(QTCaptureConnection *)connection; + +@end + +@implementation FFMPEG_FrameReceiver + +- (id)initWithContext:(CaptureContext*)context +{ + if (self = [super init]) { + _context = context; + } + return self; +} + +- (void)captureOutput:(QTCaptureOutput *)captureOutput + didOutputVideoFrame:(CVImageBufferRef)videoFrame + withSampleBuffer:(QTSampleBuffer *)sampleBuffer + fromConnection:(QTCaptureConnection *)connection +{ + lock_frames(_context); + if (_context->current_frame != nil) { + CVBufferRelease(_context->current_frame); + } + + _context->current_frame = CVBufferRetain(videoFrame); + + pthread_cond_signal(&_context->frame_wait_cond); + + unlock_frames(_context); + + ++_context->frames_captured; +} + +@end + +static void destroy_context(CaptureContext* ctx) +{ + [ctx->capture_session stopRunning]; + + [ctx->capture_session release]; + [ctx->video_output release]; + [ctx->qt_delegate release]; + + ctx->capture_session = NULL; + ctx->video_output = NULL; + ctx->qt_delegate = NULL; + + pthread_mutex_destroy(&ctx->frame_lock); + pthread_cond_destroy(&ctx->frame_wait_cond); + + if (ctx->current_frame) + CVBufferRelease(ctx->current_frame); +} + +static int qtkit_read_header(AVFormatContext *s) +{ + NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init]; + + CaptureContext* ctx = (CaptureContext*)s->priv_data; + + ctx->first_pts = av_gettime(); + + pthread_mutex_init(&ctx->frame_lock, NULL); + pthread_cond_init(&ctx->frame_wait_cond, NULL); + + // List devices if requested + if (ctx->list_devices) { + av_log(ctx, AV_LOG_INFO, "QTKit video devices:\n"); + 
NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]; + for (QTCaptureDevice *device in devices) { + const char *name = [[device localizedDisplayName] UTF8String]; + int index = [devices indexOfObject:device]; + av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name); + } + goto fail; + } + + // Find capture device + QTCaptureDevice *video_device = nil; + + // check for device index given in filename + if (ctx->video_device_index == -1) { + sscanf(s->filename, "%d", &ctx->video_device_index); + } + + if (ctx->video_device_index >= 0) { + NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]; + + if (ctx->video_device_index >= [devices count]) { + av_log(ctx, AV_LOG_ERROR, "Invalid device index\n"); + goto fail; + } + + video_device = [devices objectAtIndex:ctx->video_device_index]; + } else if (strncmp(s->filename, "", 1) && + strncmp(s->filename, "default", 7)) { + NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]; + + for (QTCaptureDevice *device in devices) { + if (!strncmp(s->filename, [[device localizedDisplayName] UTF8String], strlen(s->filename))) { + video_device = device; + break; + } + } + if (!video_device) { + av_log(ctx, AV_LOG_ERROR, "Video device not found\n"); + goto fail; + } + } else { + video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed]; + } + + BOOL success = [video_device open:nil]; + + // Video capture device not found, looking for QTMediaTypeVideo + if (!success) { + video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo]; + success = [video_device open:nil]; + + if (!success) { + av_log(s, AV_LOG_ERROR, "No QT capture device found\n"); + goto fail; + } + } + + NSString* dev_display_name = [video_device localizedDisplayName]; + av_log (s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]); + + // Initialize capture session + ctx->capture_session = [[QTCaptureSession alloc] init]; + + 
QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease]; + success = [ctx->capture_session addInput:capture_dev_input error:nil]; + + if (!success) { + av_log (s, AV_LOG_ERROR, "Failed to add QT capture device to session\n"); + goto fail; + } + + // Attaching output + // FIXME: Allow for a user defined pixel format + ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init]; + + NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject: + [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB] + forKey:(id)kCVPixelBufferPixelFormatTypeKey]; + + [ctx->video_output setPixelBufferAttributes:captureDictionary]; + + ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx]; + + [ctx->video_output setDelegate:ctx->qt_delegate]; + [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES]; + [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate]; + + success = [ctx->capture_session addOutput:ctx->video_output error:nil]; + + if (!success) { + av_log (s, AV_LOG_ERROR, "can't add video output to capture session\n"); + goto fail; + } + + [ctx->capture_session startRunning]; + + // Take stream info from the first frame. 
+ while (ctx->frames_captured < 1) { + CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES); + } + + lock_frames(ctx); + + AVStream* stream = avformat_new_stream(s, NULL); + + if (!stream) { + goto fail; + } + + avpriv_set_pts_info(stream, 64, 1, QTKIT_TIMEBASE); + + stream->codec->codec_id = AV_CODEC_ID_RAWVIDEO; + stream->codec->codec_type = AVMEDIA_TYPE_VIDEO; + stream->codec->width = (int)CVPixelBufferGetWidth (ctx->current_frame); + stream->codec->height = (int)CVPixelBufferGetHeight(ctx->current_frame); + stream->codec->pix_fmt = AV_PIX_FMT_RGB24; + + CVBufferRelease(ctx->current_frame); + ctx->current_frame = nil; + + unlock_frames(ctx); + + [pool release]; + + return 0; + +fail: + [pool release]; + + destroy_context(ctx); + + return AVERROR(EIO); +} + +static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt) +{ + CaptureContext* ctx = (CaptureContext*)s->priv_data; + + do { + lock_frames(ctx); + + if (ctx->current_frame != nil) { + if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) { + return AVERROR(EIO); + } + + pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q); + pkt->stream_index = 0; + pkt->flags |= AV_PKT_FLAG_KEY; + + CVPixelBufferLockBaseAddress(ctx->current_frame, 0); + + void* data = CVPixelBufferGetBaseAddress(ctx->current_frame); + memcpy(pkt->data, data, pkt->size); + + CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0); + CVBufferRelease(ctx->current_frame); + ctx->current_frame = nil; + } else { + pkt->data = NULL; + pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock); + } + + unlock_frames(ctx); + } while (!pkt->data); + + return 0; +} + +static int qtkit_close(AVFormatContext *s) +{ + CaptureContext* ctx = (CaptureContext*)s->priv_data; + + destroy_context(ctx); + + return 0; +} + +static const AVOption options[] = { + { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, 
AV_OPT_TYPE_VIDEO_RATE, NULL }, + { "list_devices", "list available devices", offsetof(CaptureContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" }, + { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(CaptureContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM }, + { NULL }, +}; + +static const AVClass qtkit_class = { + .class_name = "QTKit input device", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, +}; + +AVInputFormat ff_qtkit_demuxer = { + .name = "qtkit", + .long_name = NULL_IF_CONFIG_SMALL("QTKit input device"), + .priv_data_size = sizeof(CaptureContext), + .read_header = qtkit_read_header, + .read_packet = qtkit_read_packet, + .read_close = qtkit_close, + .flags = AVFMT_NOFILE, + .priv_class = &qtkit_class, +}; diff --git a/libavdevice/sdl.c b/libavdevice/sdl.c new file mode 100644 index 0000000..b98aae5 --- /dev/null +++ b/libavdevice/sdl.c @@ -0,0 +1,375 @@ +/* + * Copyright (c) 2011 Stefano Sabatini + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * libSDL output device + */ + +#include <SDL.h> +#include <SDL_thread.h> + +#include "libavutil/avstring.h" +#include "libavutil/opt.h" +#include "libavutil/parseutils.h" +#include "libavutil/pixdesc.h" +#include "libavutil/time.h" +#include "avdevice.h" + +typedef struct { + AVClass *class; + SDL_Surface *surface; + SDL_Overlay *overlay; + char *window_title; + char *icon_title; + int window_width, window_height; /**< size of the window */ + int window_fullscreen; + + SDL_Rect overlay_rect; + int overlay_fmt; + + int sdl_was_already_inited; + SDL_Thread *event_thread; + SDL_mutex *mutex; + SDL_cond *init_cond; + int init_ret; /* return code used to signal initialization errors */ + int inited; + int quit; +} SDLContext; + +static const struct sdl_overlay_pix_fmt_entry { + enum AVPixelFormat pix_fmt; int overlay_fmt; +} sdl_overlay_pix_fmt_map[] = { + { AV_PIX_FMT_YUV420P, SDL_IYUV_OVERLAY }, + { AV_PIX_FMT_YUYV422, SDL_YUY2_OVERLAY }, + { AV_PIX_FMT_UYVY422, SDL_UYVY_OVERLAY }, + { AV_PIX_FMT_NONE, 0 }, +}; + +static int sdl_write_trailer(AVFormatContext *s) +{ + SDLContext *sdl = s->priv_data; + + sdl->quit = 1; + + if (sdl->overlay) + SDL_FreeYUVOverlay(sdl->overlay); + sdl->overlay = NULL; + if (sdl->event_thread) + SDL_WaitThread(sdl->event_thread, NULL); + sdl->event_thread = NULL; + if (sdl->mutex) + SDL_DestroyMutex(sdl->mutex); + sdl->mutex = NULL; + if (sdl->init_cond) + SDL_DestroyCond(sdl->init_cond); + sdl->init_cond = NULL; + + if (!sdl->sdl_was_already_inited) + SDL_Quit(); + + return 0; +} + +static void compute_overlay_rect(AVFormatContext *s) +{ + AVRational sar, dar; /* sample and display aspect ratios */ + SDLContext *sdl = s->priv_data; + AVStream *st = s->streams[0]; + AVCodecContext *encctx = st->codec; 
+ SDL_Rect *overlay_rect = &sdl->overlay_rect; + + /* compute overlay width and height from the codec context information */ + sar = st->sample_aspect_ratio.num ? st->sample_aspect_ratio : (AVRational){ 1, 1 }; + dar = av_mul_q(sar, (AVRational){ encctx->width, encctx->height }); + + /* we suppose the screen has a 1/1 sample aspect ratio */ + if (sdl->window_width && sdl->window_height) { + /* fit in the window */ + if (av_cmp_q(dar, (AVRational){ sdl->window_width, sdl->window_height }) > 0) { + /* fit in width */ + overlay_rect->w = sdl->window_width; + overlay_rect->h = av_rescale(overlay_rect->w, dar.den, dar.num); + } else { + /* fit in height */ + overlay_rect->h = sdl->window_height; + overlay_rect->w = av_rescale(overlay_rect->h, dar.num, dar.den); + } + } else { + if (sar.num > sar.den) { + overlay_rect->w = encctx->width; + overlay_rect->h = av_rescale(overlay_rect->w, dar.den, dar.num); + } else { + overlay_rect->h = encctx->height; + overlay_rect->w = av_rescale(overlay_rect->h, dar.num, dar.den); + } + sdl->window_width = overlay_rect->w; + sdl->window_height = overlay_rect->h; + } + + overlay_rect->x = (sdl->window_width - overlay_rect->w) / 2; + overlay_rect->y = (sdl->window_height - overlay_rect->h) / 2; +} + +#define SDL_BASE_FLAGS (SDL_SWSURFACE|SDL_RESIZABLE) + +static int event_thread(void *arg) +{ + AVFormatContext *s = arg; + SDLContext *sdl = s->priv_data; + int flags = SDL_BASE_FLAGS | (sdl->window_fullscreen ? 
SDL_FULLSCREEN : 0); + AVStream *st = s->streams[0]; + AVCodecContext *encctx = st->codec; + + /* initialization */ + if (SDL_Init(SDL_INIT_VIDEO) != 0) { + av_log(s, AV_LOG_ERROR, "Unable to initialize SDL: %s\n", SDL_GetError()); + sdl->init_ret = AVERROR(EINVAL); + goto init_end; + } + + SDL_WM_SetCaption(sdl->window_title, sdl->icon_title); + sdl->surface = SDL_SetVideoMode(sdl->window_width, sdl->window_height, + 24, flags); + if (!sdl->surface) { + av_log(sdl, AV_LOG_ERROR, "Unable to set video mode: %s\n", SDL_GetError()); + sdl->init_ret = AVERROR(EINVAL); + goto init_end; + } + + sdl->overlay = SDL_CreateYUVOverlay(encctx->width, encctx->height, + sdl->overlay_fmt, sdl->surface); + if (!sdl->overlay || sdl->overlay->pitches[0] < encctx->width) { + av_log(s, AV_LOG_ERROR, + "SDL does not support an overlay with size of %dx%d pixels\n", + encctx->width, encctx->height); + sdl->init_ret = AVERROR(EINVAL); + goto init_end; + } + + sdl->init_ret = 0; + av_log(s, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s -> w:%d h:%d\n", + encctx->width, encctx->height, av_get_pix_fmt_name(encctx->pix_fmt), + sdl->overlay_rect.w, sdl->overlay_rect.h); + +init_end: + SDL_LockMutex(sdl->mutex); + sdl->inited = 1; + SDL_UnlockMutex(sdl->mutex); + SDL_CondSignal(sdl->init_cond); + + if (sdl->init_ret < 0) + return sdl->init_ret; + + /* event loop */ + while (!sdl->quit) { + int ret; + SDL_Event event; + SDL_PumpEvents(); + ret = SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_ALLEVENTS); + if (ret < 0) { + av_log(s, AV_LOG_ERROR, "Error when getting SDL event: %s\n", SDL_GetError()); + continue; + } + if (ret == 0) { + SDL_Delay(10); + continue; + } + + switch (event.type) { + case SDL_KEYDOWN: + switch (event.key.keysym.sym) { + case SDLK_ESCAPE: + case SDLK_q: + sdl->quit = 1; + break; + } + break; + case SDL_QUIT: + sdl->quit = 1; + break; + + case SDL_VIDEORESIZE: + sdl->window_width = event.resize.w; + sdl->window_height = event.resize.h; + + SDL_LockMutex(sdl->mutex); + sdl->surface = 
SDL_SetVideoMode(sdl->window_width, sdl->window_height, 24, SDL_BASE_FLAGS); + if (!sdl->surface) { + av_log(s, AV_LOG_ERROR, "Failed to set SDL video mode: %s\n", SDL_GetError()); + sdl->quit = 1; + } else { + compute_overlay_rect(s); + } + SDL_UnlockMutex(sdl->mutex); + break; + + default: + break; + } + } + + return 0; +} + +static int sdl_write_header(AVFormatContext *s) +{ + SDLContext *sdl = s->priv_data; + AVStream *st = s->streams[0]; + AVCodecContext *encctx = st->codec; + int i, ret; + + if (!sdl->window_title) + sdl->window_title = av_strdup(s->filename); + if (!sdl->icon_title) + sdl->icon_title = av_strdup(sdl->window_title); + + if (SDL_WasInit(SDL_INIT_VIDEO)) { + av_log(s, AV_LOG_ERROR, + "SDL video subsystem was already inited, aborting\n"); + sdl->sdl_was_already_inited = 1; + ret = AVERROR(EINVAL); + goto fail; + } + + if ( s->nb_streams > 1 + || encctx->codec_type != AVMEDIA_TYPE_VIDEO + || encctx->codec_id != AV_CODEC_ID_RAWVIDEO) { + av_log(s, AV_LOG_ERROR, "Only supports one rawvideo stream\n"); + ret = AVERROR(EINVAL); + goto fail; + } + + for (i = 0; sdl_overlay_pix_fmt_map[i].pix_fmt != AV_PIX_FMT_NONE; i++) { + if (sdl_overlay_pix_fmt_map[i].pix_fmt == encctx->pix_fmt) { + sdl->overlay_fmt = sdl_overlay_pix_fmt_map[i].overlay_fmt; + break; + } + } + + if (!sdl->overlay_fmt) { + av_log(s, AV_LOG_ERROR, + "Unsupported pixel format '%s', choose one of yuv420p, yuyv422, or uyvy422\n", + av_get_pix_fmt_name(encctx->pix_fmt)); + ret = AVERROR(EINVAL); + goto fail; + } + + /* compute overlay width and height from the codec context information */ + compute_overlay_rect(s); + + sdl->init_cond = SDL_CreateCond(); + if (!sdl->init_cond) { + av_log(s, AV_LOG_ERROR, "Could not create SDL condition variable: %s\n", SDL_GetError()); + ret = AVERROR_EXTERNAL; + goto fail; + } + sdl->mutex = SDL_CreateMutex(); + if (!sdl->mutex) { + av_log(s, AV_LOG_ERROR, "Could not create SDL mutex: %s\n", SDL_GetError()); + ret = AVERROR_EXTERNAL; + goto fail; + } + 
sdl->event_thread = SDL_CreateThread(event_thread, s); + if (!sdl->event_thread) { + av_log(s, AV_LOG_ERROR, "Could not create SDL event thread: %s\n", SDL_GetError()); + ret = AVERROR_EXTERNAL; + goto fail; + } + + /* wait until the video system has been inited */ + SDL_LockMutex(sdl->mutex); + while (!sdl->inited) { + SDL_CondWait(sdl->init_cond, sdl->mutex); + } + SDL_UnlockMutex(sdl->mutex); + if (sdl->init_ret < 0) { + ret = sdl->init_ret; + goto fail; + } + return 0; + +fail: + sdl_write_trailer(s); + return ret; +} + +static int sdl_write_packet(AVFormatContext *s, AVPacket *pkt) +{ + SDLContext *sdl = s->priv_data; + AVCodecContext *encctx = s->streams[0]->codec; + AVPicture pict; + int i; + + if (sdl->quit) { + sdl_write_trailer(s); + return AVERROR(EIO); + } + avpicture_fill(&pict, pkt->data, encctx->pix_fmt, encctx->width, encctx->height); + + SDL_LockMutex(sdl->mutex); + SDL_FillRect(sdl->surface, &sdl->surface->clip_rect, + SDL_MapRGB(sdl->surface->format, 0, 0, 0)); + SDL_LockYUVOverlay(sdl->overlay); + for (i = 0; i < 3; i++) { + sdl->overlay->pixels [i] = pict.data [i]; + sdl->overlay->pitches[i] = pict.linesize[i]; + } + SDL_DisplayYUVOverlay(sdl->overlay, &sdl->overlay_rect); + SDL_UnlockYUVOverlay(sdl->overlay); + + SDL_UpdateRect(sdl->surface, + sdl->overlay_rect.x, sdl->overlay_rect.y, + sdl->overlay_rect.w, sdl->overlay_rect.h); + SDL_UnlockMutex(sdl->mutex); + + return 0; +} + +#define OFFSET(x) offsetof(SDLContext,x) + +static const AVOption options[] = { + { "window_title", "set SDL window title", OFFSET(window_title), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, + { "icon_title", "set SDL iconified window title", OFFSET(icon_title) , AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_size", "set SDL window forced size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE, { .str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_fullscreen", "set SDL window fullscreen", 
OFFSET(window_fullscreen), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, AV_OPT_FLAG_ENCODING_PARAM }, + { NULL }, +}; + +static const AVClass sdl_class = { + .class_name = "sdl outdev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_sdl_muxer = { + .name = "sdl", + .long_name = NULL_IF_CONFIG_SMALL("SDL output device"), + .priv_data_size = sizeof(SDLContext), + .audio_codec = AV_CODEC_ID_NONE, + .video_codec = AV_CODEC_ID_RAWVIDEO, + .write_header = sdl_write_header, + .write_packet = sdl_write_packet, + .write_trailer = sdl_write_trailer, + .flags = AVFMT_NOFILE | AVFMT_VARIABLE_FPS | AVFMT_NOTIMESTAMPS, + .priv_class = &sdl_class, +}; diff --git a/libavdevice/sndio_common.c b/libavdevice/sndio_common.c index 1bea6c5..19f39be 100644 --- a/libavdevice/sndio_common.c +++ b/libavdevice/sndio_common.c @@ -2,27 +2,27 @@ * sndio play and grab interface * Copyright (c) 2010 Jacob Meuser * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include <stdint.h> #include <sndio.h> -#include "libavformat/avformat.h" +#include "avdevice.h" #include "sndio_common.h" diff --git a/libavdevice/sndio_common.h b/libavdevice/sndio_common.h index 2f70213..74f41f5 100644 --- a/libavdevice/sndio_common.h +++ b/libavdevice/sndio_common.h @@ -2,20 +2,20 @@ * sndio play and grab interface * Copyright (c) 2010 Jacob Meuser * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -25,8 +25,8 @@ #include <stdint.h> #include <sndio.h> -#include "libavformat/avformat.h" #include "libavutil/log.h" +#include "avdevice.h" typedef struct SndioData { AVClass *class; diff --git a/libavdevice/sndio_dec.c b/libavdevice/sndio_dec.c index 58caaa1..37c6983 100644 --- a/libavdevice/sndio_dec.c +++ b/libavdevice/sndio_dec.c @@ -2,20 +2,20 @@ * sndio play and grab interface * Copyright (c) 2010 Jacob Meuser * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -104,6 +104,7 @@ static const AVClass sndio_demuxer_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT, }; AVInputFormat ff_sndio_demuxer = { diff --git a/libavdevice/sndio_enc.c b/libavdevice/sndio_enc.c index 6f69b9e..205cf24 100644 --- a/libavdevice/sndio_enc.c +++ b/libavdevice/sndio_enc.c @@ -2,28 +2,27 @@ * sndio play and grab interface * Copyright (c) 2010 Jacob Meuser * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include <stdint.h> #include <sndio.h> -#include "libavformat/avformat.h" - +#include "avdevice.h" #include "sndio_common.h" static av_cold int audio_write_header(AVFormatContext *s1) @@ -77,6 +76,13 @@ static int audio_write_trailer(AVFormatContext *s1) return 0; } +static const AVClass sndio_muxer_class = { + .class_name = "sndio outdev", + .item_name = av_default_item_name, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT, +}; + AVOutputFormat ff_sndio_muxer = { .name = "sndio", .long_name = NULL_IF_CONFIG_SMALL("sndio audio playback"), @@ -90,4 +96,5 @@ AVOutputFormat ff_sndio_muxer = { .write_packet = audio_write_packet, .write_trailer = audio_write_trailer, .flags = AVFMT_NOFILE, + .priv_class = &sndio_muxer_class, }; diff --git a/libavdevice/timefilter.c b/libavdevice/timefilter.c index a497351..9d38f93 100644 --- a/libavdevice/timefilter.c +++ b/libavdevice/timefilter.c @@ -5,20 +5,20 @@ * Author: Olivier Guilyardi <olivier samalyse com> * Michael Niedermayer <michaelni gmx at> * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -37,18 +37,25 @@ struct TimeFilter { int count; }; -TimeFilter *ff_timefilter_new(double clock_period, - double feedback2_factor, - double feedback3_factor) +/* 1 - exp(-x) using a 3-order power series */ +static double qexpneg(double x) { - TimeFilter *self = av_mallocz(sizeof(TimeFilter)); + return 1 - 1 / (1 + x * (1 + x / 2 * (1 + x / 3))); +} + +TimeFilter *ff_timefilter_new(double time_base, + double period, + double bandwidth) +{ + TimeFilter *self = av_mallocz(sizeof(TimeFilter)); + double o = 2 * M_PI * bandwidth * period * time_base; if (!self) return NULL; - self->clock_period = clock_period; - self->feedback2_factor = feedback2_factor; - self->feedback3_factor = feedback3_factor; + self->clock_period = time_base; + self->feedback2_factor = qexpneg(M_SQRT2 * o); + self->feedback3_factor = qexpneg(o * o) / period; return self; } @@ -73,11 +80,16 @@ double ff_timefilter_update(TimeFilter *self, double system_time, double period) loop_error = system_time - self->cycle_time; self->cycle_time += FFMAX(self->feedback2_factor, 1.0 / self->count) * loop_error; - self->clock_period += self->feedback3_factor * loop_error / period; + self->clock_period += self->feedback3_factor * loop_error; } return self->cycle_time; } +double ff_timefilter_eval(TimeFilter *self, double delta) +{ + return self->cycle_time + self->clock_period * delta; +} + #ifdef TEST #include "libavutil/lfg.h" #define LFG_MAX ((1LL << 32) - 1) @@ -89,17 +101,21 @@ int main(void) #define SAMPLES 1000 double ideal[SAMPLES]; double samples[SAMPLES]; + double samplet[SAMPLES]; for (n0 = 0; n0 < 40; n0 = 2 * n0 + 1) { for (n1 = 0; n1 < 10; n1 = 2 * n1 + 1) { 
double best_error = 1000000000; - double bestpar0 = 1; - double bestpar1 = 0.001; + double bestpar0 = n0 ? 1 : 100000; + double bestpar1 = 1; int better, i; av_lfg_init(&prng, 123); for (i = 0; i < SAMPLES; i++) { - ideal[i] = 10 + i + n1 * i / (1000); + samplet[i] = 10 + i + (av_lfg_get(&prng) < LFG_MAX/2 ? 0 : 0.999); + ideal[i] = samplet[i] + n1 * i / (1000); samples[i] = ideal[i] + n0 * (av_lfg_get(&prng) - LFG_MAX / 2) / (LFG_MAX * 10LL); + if(i && samples[i]<samples[i-1]) + samples[i]=samples[i-1]+0.001; } do { @@ -115,7 +131,9 @@ int main(void) } for (i = 0; i < SAMPLES; i++) { double filtered; - filtered = ff_timefilter_update(tf, samples[i], 1); + filtered = ff_timefilter_update(tf, samples[i], i ? (samplet[i] - samplet[i-1]) : 1); + if(filtered < 0 || filtered > 1000000000) + printf("filter is unstable\n"); error += (filtered - ideal[i]) * (filtered - ideal[i]); } ff_timefilter_destroy(tf); @@ -140,7 +158,7 @@ int main(void) } ff_timefilter_destroy(tf); #else - printf(" [%f %f %9f]", bestpar0, bestpar1, best_error); + printf(" [%12f %11f %9f]", bestpar0, bestpar1, best_error); #endif } printf("\n"); diff --git a/libavdevice/timefilter.h b/libavdevice/timefilter.h index 2235db6..cb3d0a7 100644 --- a/libavdevice/timefilter.h +++ b/libavdevice/timefilter.h @@ -5,20 +5,20 @@ * Author: Olivier Guilyardi <olivier samalyse com> * Michael Niedermayer <michaelni gmx at> * - * This file is part of Libav. + * This file is part of FFmpeg. * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. 
* - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -45,16 +45,18 @@ typedef struct TimeFilter TimeFilter; * * Unless you know what you are doing, you should set these as follow: * - * o = 2 * M_PI * bandwidth * period - * feedback2_factor = sqrt(2 * o) + * o = 2 * M_PI * bandwidth * period_in_seconds + * feedback2_factor = sqrt(2) * o * feedback3_factor = o * o * * Where bandwidth is up to you to choose. Smaller values will filter out more * of the jitter, but also take a longer time for the loop to settle. A good * starting point is something between 0.3 and 3 Hz. * - * @param clock_period period of the hardware clock in seconds - * (for example 1.0/44100) + * @param time_base period of the hardware clock in seconds + * (for example 1.0/44100) + * @param period expected update interval, in input units + * @param brandwidth filtering bandwidth, in Hz * * @return a pointer to a TimeFilter struct, or NULL on error * @@ -82,6 +84,15 @@ TimeFilter * ff_timefilter_new(double clock_period, double feedback2_factor, dou double ff_timefilter_update(TimeFilter *self, double system_time, double period); /** + * Evaluate the filter at a specified time + * + * @param delta difference between the requested time and the current time + * (last call to ff_timefilter_update). + * @return the filtered time + */ +double ff_timefilter_eval(TimeFilter *self, double delta); + +/** * Reset the filter * * This function should mainly be called in case of XRUN. 
diff --git a/libavdevice/v4l.c b/libavdevice/v4l.c new file mode 100644 index 0000000..d33f714 --- /dev/null +++ b/libavdevice/v4l.c @@ -0,0 +1,363 @@ +/* + * Linux video grab interface + * Copyright (c) 2000,2001 Fabrice Bellard + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "avdevice.h" + +#undef __STRICT_ANSI__ //workaround due to broken kernel headers +#include "config.h" +#include "libavutil/rational.h" +#include "libavutil/imgutils.h" +#include "libavutil/log.h" +#include "libavutil/opt.h" +#include "libavformat/internal.h" +#include "libavcodec/dsputil.h" +#include <unistd.h> +#include <fcntl.h> +#include <sys/ioctl.h> +#include <sys/mman.h> +#include <sys/time.h> +#define _LINUX_TIME_H 1 +#include <linux/videodev.h> +#include <time.h> + +typedef struct { + AVClass *class; + int fd; + int frame_format; /* see VIDEO_PALETTE_xxx */ + int use_mmap; + AVRational time_base; + int64_t time_frame; + int frame_size; + struct video_capability video_cap; + struct video_audio audio_saved; + struct video_window video_win; + uint8_t *video_buf; + struct video_mbuf gb_buffers; + struct video_mmap gb_buf; + int gb_frame; + int standard; +} VideoData; + +static const struct { + int palette; + int depth; + enum AVPixelFormat pix_fmt; +} 
video_formats [] = { + {.palette = VIDEO_PALETTE_YUV420P, .depth = 12, .pix_fmt = AV_PIX_FMT_YUV420P }, + {.palette = VIDEO_PALETTE_YUV422, .depth = 16, .pix_fmt = AV_PIX_FMT_YUYV422 }, + {.palette = VIDEO_PALETTE_UYVY, .depth = 16, .pix_fmt = AV_PIX_FMT_UYVY422 }, + {.palette = VIDEO_PALETTE_YUYV, .depth = 16, .pix_fmt = AV_PIX_FMT_YUYV422 }, + /* NOTE: v4l uses BGR24, not RGB24 */ + {.palette = VIDEO_PALETTE_RGB24, .depth = 24, .pix_fmt = AV_PIX_FMT_BGR24 }, + {.palette = VIDEO_PALETTE_RGB565, .depth = 16, .pix_fmt = AV_PIX_FMT_BGR565 }, + {.palette = VIDEO_PALETTE_GREY, .depth = 8, .pix_fmt = AV_PIX_FMT_GRAY8 }, +}; + + +static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap) +{ + VideoData *s = s1->priv_data; + AVStream *st; + int video_fd; + int desired_palette, desired_depth; + struct video_tuner tuner; + struct video_audio audio; + struct video_picture pict; + int j; + int vformat_num = FF_ARRAY_ELEMS(video_formats); + + av_log(s1, AV_LOG_WARNING, "V4L input device is deprecated and will be removed in the next release."); + + if (ap->time_base.den <= 0) { + av_log(s1, AV_LOG_ERROR, "Wrong time base (%d)\n", ap->time_base.den); + return -1; + } + s->time_base = ap->time_base; + + s->video_win.width = ap->width; + s->video_win.height = ap->height; + + st = avformat_new_stream(s1, NULL); + if (!st) + return AVERROR(ENOMEM); + avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ + + video_fd = open(s1->filename, O_RDWR); + if (video_fd < 0) { + av_log(s1, AV_LOG_ERROR, "%s: %s\n", s1->filename, strerror(errno)); + goto fail; + } + + if (ioctl(video_fd, VIDIOCGCAP, &s->video_cap) < 0) { + av_log(s1, AV_LOG_ERROR, "VIDIOCGCAP: %s\n", strerror(errno)); + goto fail; + } + + if (!(s->video_cap.type & VID_TYPE_CAPTURE)) { + av_log(s1, AV_LOG_ERROR, "Fatal: grab device does not handle capture\n"); + goto fail; + } + + /* no values set, autodetect them */ + if (s->video_win.width <= 0 || s->video_win.height <= 0) { + if (ioctl(video_fd, 
VIDIOCGWIN, &s->video_win, sizeof(s->video_win)) < 0) { + av_log(s1, AV_LOG_ERROR, "VIDIOCGWIN: %s\n", strerror(errno)); + goto fail; + } + } + + if(av_image_check_size(s->video_win.width, s->video_win.height, 0, s1) < 0) + return -1; + + desired_palette = -1; + desired_depth = -1; + for (j = 0; j < vformat_num; j++) { + if (ap->pix_fmt == video_formats[j].pix_fmt) { + desired_palette = video_formats[j].palette; + desired_depth = video_formats[j].depth; + break; + } + } + + /* set tv standard */ + if (!ioctl(video_fd, VIDIOCGTUNER, &tuner)) { + tuner.mode = s->standard; + ioctl(video_fd, VIDIOCSTUNER, &tuner); + } + + /* unmute audio */ + audio.audio = 0; + ioctl(video_fd, VIDIOCGAUDIO, &audio); + memcpy(&s->audio_saved, &audio, sizeof(audio)); + audio.flags &= ~VIDEO_AUDIO_MUTE; + ioctl(video_fd, VIDIOCSAUDIO, &audio); + + ioctl(video_fd, VIDIOCGPICT, &pict); + av_dlog(s1, "v4l: colour=%d hue=%d brightness=%d constrast=%d whiteness=%d\n", + pict.colour, pict.hue, pict.brightness, pict.contrast, pict.whiteness); + /* try to choose a suitable video format */ + pict.palette = desired_palette; + pict.depth= desired_depth; + if (desired_palette == -1 || ioctl(video_fd, VIDIOCSPICT, &pict) < 0) { + for (j = 0; j < vformat_num; j++) { + pict.palette = video_formats[j].palette; + pict.depth = video_formats[j].depth; + if (-1 != ioctl(video_fd, VIDIOCSPICT, &pict)) + break; + } + if (j >= vformat_num) + goto fail1; + } + + if (ioctl(video_fd, VIDIOCGMBUF, &s->gb_buffers) < 0) { + /* try to use read based access */ + int val; + + s->video_win.x = 0; + s->video_win.y = 0; + s->video_win.chromakey = -1; + s->video_win.flags = 0; + + if (ioctl(video_fd, VIDIOCSWIN, s->video_win) < 0) { + av_log(s1, AV_LOG_ERROR, "VIDIOCSWIN: %s\n", strerror(errno)); + goto fail; + } + + s->frame_format = pict.palette; + + val = 1; + if (ioctl(video_fd, VIDIOCCAPTURE, &val) < 0) { + av_log(s1, AV_LOG_ERROR, "VIDIOCCAPTURE: %s\n", strerror(errno)); + goto fail; + } + + s->time_frame = 
av_gettime() * s->time_base.den / s->time_base.num; + s->use_mmap = 0; + } else { + s->video_buf = mmap(0, s->gb_buffers.size, PROT_READ|PROT_WRITE, MAP_SHARED, video_fd, 0); + if ((unsigned char*)-1 == s->video_buf) { + s->video_buf = mmap(0, s->gb_buffers.size, PROT_READ|PROT_WRITE, MAP_PRIVATE, video_fd, 0); + if ((unsigned char*)-1 == s->video_buf) { + av_log(s1, AV_LOG_ERROR, "mmap: %s\n", strerror(errno)); + goto fail; + } + } + s->gb_frame = 0; + s->time_frame = av_gettime() * s->time_base.den / s->time_base.num; + + /* start to grab the first frame */ + s->gb_buf.frame = s->gb_frame % s->gb_buffers.frames; + s->gb_buf.height = s->video_win.height; + s->gb_buf.width = s->video_win.width; + s->gb_buf.format = pict.palette; + + if (ioctl(video_fd, VIDIOCMCAPTURE, &s->gb_buf) < 0) { + if (errno != EAGAIN) { + fail1: + av_log(s1, AV_LOG_ERROR, "VIDIOCMCAPTURE: %s\n", strerror(errno)); + } else { + av_log(s1, AV_LOG_ERROR, "Fatal: grab device does not receive any video signal\n"); + } + goto fail; + } + for (j = 1; j < s->gb_buffers.frames; j++) { + s->gb_buf.frame = j; + ioctl(video_fd, VIDIOCMCAPTURE, &s->gb_buf); + } + s->frame_format = s->gb_buf.format; + s->use_mmap = 1; + } + + for (j = 0; j < vformat_num; j++) { + if (s->frame_format == video_formats[j].palette) { + s->frame_size = s->video_win.width * s->video_win.height * video_formats[j].depth / 8; + st->codec->pix_fmt = video_formats[j].pix_fmt; + break; + } + } + + if (j >= vformat_num) + goto fail; + + s->fd = video_fd; + + st->codec->codec_type = AVMEDIA_TYPE_VIDEO; + st->codec->codec_id = AV_CODEC_ID_RAWVIDEO; + st->codec->width = s->video_win.width; + st->codec->height = s->video_win.height; + st->codec->time_base = s->time_base; + st->codec->bit_rate = s->frame_size * 1/av_q2d(st->codec->time_base) * 8; + + return 0; + fail: + if (video_fd >= 0) + close(video_fd); + return AVERROR(EIO); +} + +static int v4l_mm_read_picture(VideoData *s, uint8_t *buf) +{ + uint8_t *ptr; + + while (ioctl(s->fd, 
VIDIOCSYNC, &s->gb_frame) < 0 && + (errno == EAGAIN || errno == EINTR)); + + ptr = s->video_buf + s->gb_buffers.offsets[s->gb_frame]; + memcpy(buf, ptr, s->frame_size); + + /* Setup to capture the next frame */ + s->gb_buf.frame = s->gb_frame; + if (ioctl(s->fd, VIDIOCMCAPTURE, &s->gb_buf) < 0) { + if (errno == EAGAIN) + av_log(NULL, AV_LOG_ERROR, "Cannot Sync\n"); + else + av_log(NULL, AV_LOG_ERROR, "VIDIOCMCAPTURE: %s\n", strerror(errno)); + return AVERROR(EIO); + } + + /* This is now the grabbing frame */ + s->gb_frame = (s->gb_frame + 1) % s->gb_buffers.frames; + + return s->frame_size; +} + +static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt) +{ + VideoData *s = s1->priv_data; + int64_t curtime, delay; + struct timespec ts; + + /* Calculate the time of the next frame */ + s->time_frame += INT64_C(1000000); + + /* wait based on the frame rate */ + for(;;) { + curtime = av_gettime(); + delay = s->time_frame * s->time_base.num / s->time_base.den - curtime; + if (delay <= 0) { + if (delay < INT64_C(-1000000) * s->time_base.num / s->time_base.den) { + /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */ + s->time_frame += INT64_C(1000000); + } + break; + } + ts.tv_sec = delay / 1000000; + ts.tv_nsec = (delay % 1000000) * 1000; + nanosleep(&ts, NULL); + } + + if (av_new_packet(pkt, s->frame_size) < 0) + return AVERROR(EIO); + + pkt->pts = curtime; + + /* read one frame */ + if (s->use_mmap) { + return v4l_mm_read_picture(s, pkt->data); + } else { + if (read(s->fd, pkt->data, pkt->size) != pkt->size) + return AVERROR(EIO); + return s->frame_size; + } +} + +static int grab_read_close(AVFormatContext *s1) +{ + VideoData *s = s1->priv_data; + + if (s->use_mmap) + munmap(s->video_buf, s->gb_buffers.size); + + /* mute audio. 
we must force it because the BTTV driver does not + return its state correctly */ + s->audio_saved.flags |= VIDEO_AUDIO_MUTE; + ioctl(s->fd, VIDIOCSAUDIO, &s->audio_saved); + + close(s->fd); + return 0; +} + +static const AVOption options[] = { + { "standard", "", offsetof(VideoData, standard), AV_OPT_TYPE_INT, {.i64 = VIDEO_MODE_NTSC}, VIDEO_MODE_PAL, VIDEO_MODE_NTSC, AV_OPT_FLAG_DECODING_PARAM, "standard" }, + { "PAL", "", 0, AV_OPT_TYPE_CONST, {.i64 = VIDEO_MODE_PAL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "standard" }, + { "SECAM", "", 0, AV_OPT_TYPE_CONST, {.i64 = VIDEO_MODE_SECAM}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "standard" }, + { "NTSC", "", 0, AV_OPT_TYPE_CONST, {.i64 = VIDEO_MODE_NTSC}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "standard" }, + { NULL }, +}; + +static const AVClass v4l_class = { + .class_name = "V4L indev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, +}; + +AVInputFormat ff_v4l_demuxer = { + .name = "video4linux,v4l", + .long_name = NULL_IF_CONFIG_SMALL("Video4Linux device grab"), + .priv_data_size = sizeof(VideoData), + .read_header = grab_read_header, + .read_packet = grab_read_packet, + .read_close = grab_read_close, + .flags = AVFMT_NOFILE, + .priv_class = &v4l_class, +}; diff --git a/libavdevice/v4l2-common.c b/libavdevice/v4l2-common.c new file mode 100644 index 0000000..c4c75d7 --- /dev/null +++ b/libavdevice/v4l2-common.c @@ -0,0 +1,102 @@ +/* + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "v4l2-common.h" + +const struct fmt_map avpriv_fmt_conversion_table[] = { + //ff_fmt codec_id v4l2_fmt + { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV420 }, + { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU420 }, + { AV_PIX_FMT_YUV422P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV422P }, + { AV_PIX_FMT_YUYV422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUYV }, + { AV_PIX_FMT_UYVY422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_UYVY }, + { AV_PIX_FMT_YUV411P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV411P }, + { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV410 }, + { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU410 }, + { AV_PIX_FMT_RGB555LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555 }, + { AV_PIX_FMT_RGB555BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555X }, + { AV_PIX_FMT_RGB565LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565 }, + { AV_PIX_FMT_RGB565BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565X }, + { AV_PIX_FMT_BGR24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR24 }, + { AV_PIX_FMT_RGB24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB24 }, + { AV_PIX_FMT_BGR0, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR32 }, + { AV_PIX_FMT_0RGB, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB32 }, + { AV_PIX_FMT_GRAY8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_GREY }, +#ifdef V4L2_PIX_FMT_Y16 + { AV_PIX_FMT_GRAY16LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_Y16 }, +#endif + { AV_PIX_FMT_NV12, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_NV12 }, + { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_MJPEG }, + { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_JPEG }, +#ifdef V4L2_PIX_FMT_H264 + { AV_PIX_FMT_NONE, AV_CODEC_ID_H264, V4L2_PIX_FMT_H264 }, +#endif +#ifdef V4L2_PIX_FMT_CPIA1 + { AV_PIX_FMT_NONE, 
AV_CODEC_ID_CPIA, V4L2_PIX_FMT_CPIA1 }, +#endif +#ifdef V4L2_PIX_FMT_SRGGB8 + { AV_PIX_FMT_BAYER_BGGR8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SBGGR8 }, + { AV_PIX_FMT_BAYER_GBRG8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SGBRG8 }, + { AV_PIX_FMT_BAYER_GRBG8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SGRBG8 }, + { AV_PIX_FMT_BAYER_RGGB8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SRGGB8 }, +#endif + { AV_PIX_FMT_NONE, AV_CODEC_ID_NONE, 0 }, +}; + +uint32_t avpriv_fmt_ff2v4l(enum AVPixelFormat pix_fmt, enum AVCodecID codec_id) +{ + int i; + + for (i = 0; avpriv_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) { + if ((codec_id == AV_CODEC_ID_NONE || + avpriv_fmt_conversion_table[i].codec_id == codec_id) && + (pix_fmt == AV_PIX_FMT_NONE || + avpriv_fmt_conversion_table[i].ff_fmt == pix_fmt)) { + return avpriv_fmt_conversion_table[i].v4l2_fmt; + } + } + + return 0; +} + +enum AVPixelFormat avpriv_fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id) +{ + int i; + + for (i = 0; avpriv_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) { + if (avpriv_fmt_conversion_table[i].v4l2_fmt == v4l2_fmt && + avpriv_fmt_conversion_table[i].codec_id == codec_id) { + return avpriv_fmt_conversion_table[i].ff_fmt; + } + } + + return AV_PIX_FMT_NONE; +} + +enum AVCodecID avpriv_fmt_v4l2codec(uint32_t v4l2_fmt) +{ + int i; + + for (i = 0; avpriv_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) { + if (avpriv_fmt_conversion_table[i].v4l2_fmt == v4l2_fmt) { + return avpriv_fmt_conversion_table[i].codec_id; + } + } + + return AV_CODEC_ID_NONE; +} diff --git a/libavdevice/v4l2-common.h b/libavdevice/v4l2-common.h new file mode 100644 index 0000000..8aef234 --- /dev/null +++ b/libavdevice/v4l2-common.h @@ -0,0 +1,62 @@ +/* + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVDEVICE_V4L2_COMMON_H +#define AVDEVICE_V4L2_COMMON_H + +#undef __STRICT_ANSI__ //workaround due to broken kernel headers +#include "config.h" +#include "libavformat/internal.h" +#include <unistd.h> +#include <fcntl.h> +#include <sys/ioctl.h> +#include <sys/mman.h> +#include <sys/time.h> +#if HAVE_SYS_VIDEOIO_H +#include <sys/videoio.h> +#else +#if HAVE_ASM_TYPES_H +#include <asm/types.h> +#endif +#include <linux/videodev2.h> +#endif +#include "libavutil/atomic.h" +#include "libavutil/avassert.h" +#include "libavutil/imgutils.h" +#include "libavutil/log.h" +#include "libavutil/opt.h" +#include "avdevice.h" +#include "timefilter.h" +#include "libavutil/parseutils.h" +#include "libavutil/pixdesc.h" +#include "libavutil/time.h" +#include "libavutil/avstring.h" + +struct fmt_map { + enum AVPixelFormat ff_fmt; + enum AVCodecID codec_id; + uint32_t v4l2_fmt; +}; + +extern av_export const struct fmt_map avpriv_fmt_conversion_table[]; + +uint32_t avpriv_fmt_ff2v4l(enum AVPixelFormat pix_fmt, enum AVCodecID codec_id); +enum AVPixelFormat avpriv_fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id); +enum AVCodecID avpriv_fmt_v4l2codec(uint32_t v4l2_fmt); + +#endif /* AVDEVICE_V4L2_COMMON_H */ diff --git a/libavdevice/v4l2.c 
b/libavdevice/v4l2.c index e210dc4..cf7a92c 100644 --- a/libavdevice/v4l2.c +++ b/libavdevice/v4l2.c @@ -1,57 +1,40 @@ /* - * Video4Linux2 grab interface * Copyright (c) 2000,2001 Fabrice Bellard * Copyright (c) 2006 Luca Abeni * - * Part of this file is based on the V4L2 video capture example - * (http://v4l2spec.bytesex.org/v4l2spec/capture.c) - * - * Thanks to Michael Niedermayer for providing the mapping between - * V4L2_PIX_FMT_* and AV_PIX_FMT_* - * + * This file is part of FFmpeg. * - * This file is part of Libav. - * - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#undef __STRICT_ANSI__ //workaround due to broken kernel headers -#include "config.h" -#include "libavformat/avformat.h" -#include "libavformat/internal.h" -#include <unistd.h> -#include <fcntl.h> -#include <sys/ioctl.h> -#include <sys/mman.h> -#include <sys/time.h> -#include <poll.h> -#if HAVE_SYS_VIDEOIO_H -#include <sys/videoio.h> -#else -#include <linux/videodev2.h> +/** + * @file + * Video4Linux2 grab interface + * + * Part of this file is based on the V4L2 video capture example + * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html) + * + * Thanks to Michael Niedermayer for providing the mapping between + * V4L2_PIX_FMT_* and AV_PIX_FMT_* + */ + +#include "v4l2-common.h" + +#if CONFIG_LIBV4L2 +#include <libv4l2.h> #endif -#include "libavutil/atomic.h" -#include "libavutil/avassert.h" -#include "libavutil/imgutils.h" -#include "libavutil/internal.h" -#include "libavutil/log.h" -#include "libavutil/opt.h" -#include "libavutil/parseutils.h" -#include "libavutil/pixdesc.h" -#include "libavutil/avstring.h" -#include "libavutil/mathematics.h" static const int desired_video_buffers = 256; @@ -59,131 +42,161 @@ static const int desired_video_buffers = 256; #define V4L_RAWFORMATS 1 #define V4L_COMPFORMATS 2 +/** + * Return timestamps to the user exactly as returned by the kernel + */ +#define V4L_TS_DEFAULT 0 +/** + * Autodetect the kind of timestamps returned by the kernel and convert to + * absolute (wall clock) timestamps. + */ +#define V4L_TS_ABS 1 +/** + * Assume kernel timestamps are from the monotonic clock and convert to + * absolute timestamps. 
+ */ +#define V4L_TS_MONO2ABS 2 + +/** + * Once the kind of timestamps returned by the kernel have been detected, + * the value of the timefilter (NULL or not) determines whether a conversion + * takes place. + */ +#define V4L_TS_CONVERT_READY V4L_TS_DEFAULT + struct video_data { AVClass *class; int fd; int frame_format; /* V4L2_PIX_FMT_* */ int width, height; int frame_size; - int timeout; int interlaced; int top_field_first; + int ts_mode; + TimeFilter *timefilter; + int64_t last_time_m; int buffers; volatile int buffers_queued; void **buf_start; unsigned int *buf_len; char *standard; + v4l2_std_id std_id; int channel; - char *video_size; /**< String describing video size, - set by a private option. */ char *pixel_format; /**< Set by a private option. */ int list_format; /**< Set by a private option. */ + int list_standard; /**< Set by a private option. */ char *framerate; /**< Set by a private option. */ + + int use_libv4l2; + int (*open_f)(const char *file, int oflag, ...); + int (*close_f)(int fd); + int (*dup_f)(int fd); + int (*ioctl_f)(int fd, unsigned long int request, ...); + ssize_t (*read_f)(int fd, void *buffer, size_t n); + void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset); + int (*munmap_f)(void *_start, size_t length); }; struct buff_data { struct video_data *s; int index; - int fd; -}; - -struct fmt_map { - enum AVPixelFormat ff_fmt; - enum AVCodecID codec_id; - uint32_t v4l2_fmt; -}; - -static struct fmt_map fmt_conversion_table[] = { - //ff_fmt codec_id v4l2_fmt - { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV420 }, - { AV_PIX_FMT_YUV422P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV422P }, - { AV_PIX_FMT_YUYV422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUYV }, - { AV_PIX_FMT_UYVY422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_UYVY }, - { AV_PIX_FMT_YUV411P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV411P }, - { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV410 }, - { AV_PIX_FMT_RGB555, 
AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555 }, - { AV_PIX_FMT_RGB565, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565 }, - { AV_PIX_FMT_BGR24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR24 }, - { AV_PIX_FMT_RGB24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB24 }, - { AV_PIX_FMT_BGRA, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR32 }, - { AV_PIX_FMT_GRAY8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_GREY }, - { AV_PIX_FMT_NV12, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_NV12 }, - { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_MJPEG }, - { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_JPEG }, }; static int device_open(AVFormatContext *ctx) { + struct video_data *s = ctx->priv_data; struct v4l2_capability cap; int fd; - int res, err; + int ret; int flags = O_RDWR; +#define SET_WRAPPERS(prefix) do { \ + s->open_f = prefix ## open; \ + s->close_f = prefix ## close; \ + s->dup_f = prefix ## dup; \ + s->ioctl_f = prefix ## ioctl; \ + s->read_f = prefix ## read; \ + s->mmap_f = prefix ## mmap; \ + s->munmap_f = prefix ## munmap; \ +} while (0) + + if (s->use_libv4l2) { +#if CONFIG_LIBV4L2 + SET_WRAPPERS(v4l2_); +#else + av_log(ctx, AV_LOG_ERROR, "libavdevice is not build with libv4l2 support.\n"); + return AVERROR(EINVAL); +#endif + } else { + SET_WRAPPERS(); + } + +#define v4l2_open s->open_f +#define v4l2_close s->close_f +#define v4l2_dup s->dup_f +#define v4l2_ioctl s->ioctl_f +#define v4l2_read s->read_f +#define v4l2_mmap s->mmap_f +#define v4l2_munmap s->munmap_f + if (ctx->flags & AVFMT_FLAG_NONBLOCK) { flags |= O_NONBLOCK; } - fd = avpriv_open(ctx->filename, flags); + fd = v4l2_open(ctx->filename, flags, 0); if (fd < 0) { - err = errno; - - av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s : %s\n", - ctx->filename, strerror(err)); - - return AVERROR(err); + ret = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n", + ctx->filename, av_err2str(ret)); + return ret; } - res = ioctl(fd, VIDIOC_QUERYCAP, &cap); - if (res < 0) { - err = errno; + if (v4l2_ioctl(fd, 
VIDIOC_QUERYCAP, &cap) < 0) { + ret = AVERROR(errno); av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n", - strerror(err)); - + av_err2str(ret)); goto fail; } - av_log(ctx, AV_LOG_VERBOSE, "[%d]Capabilities: %x\n", + av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n", fd, cap.capabilities); if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n"); - err = ENODEV; - + ret = AVERROR(ENODEV); goto fail; } if (!(cap.capabilities & V4L2_CAP_STREAMING)) { av_log(ctx, AV_LOG_ERROR, "The device does not support the streaming I/O method.\n"); - err = ENOSYS; - + ret = AVERROR(ENOSYS); goto fail; } return fd; fail: - close(fd); - return AVERROR(err); + v4l2_close(fd); + return ret; } static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t pix_fmt) { struct video_data *s = ctx->priv_data; - int fd = s->fd; struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE }; struct v4l2_pix_format *pix = &fmt.fmt.pix; - - int res; + int res = 0; pix->width = *width; pix->height = *height; pix->pixelformat = pix_fmt; pix->field = V4L2_FIELD_ANY; - res = ioctl(fd, VIDIOC_S_FMT, &fmt); + if (v4l2_ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0) + res = AVERROR(errno); if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) { av_log(ctx, AV_LOG_INFO, @@ -198,82 +211,39 @@ static int device_init(AVFormatContext *ctx, int *width, int *height, "The V4L2 driver changed the pixel format " "from 0x%08X to 0x%08X\n", pix_fmt, fmt.fmt.pix.pixelformat); - res = -1; + res = AVERROR(EINVAL); } if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) { - av_log(ctx, AV_LOG_DEBUG, "The V4L2 driver using the interlaced mode"); + av_log(ctx, AV_LOG_DEBUG, + "The V4L2 driver is using the interlaced mode\n"); s->interlaced = 1; } return res; } -static int first_field(int fd) +static int first_field(const struct video_data *s) { int res; v4l2_std_id std; - res = ioctl(fd, VIDIOC_G_STD, &std); - if (res < 0) { + res = 
v4l2_ioctl(s->fd, VIDIOC_G_STD, &std); + if (res < 0) return 0; - } - if (std & V4L2_STD_NTSC) { + if (std & V4L2_STD_NTSC) return 0; - } return 1; } -static uint32_t fmt_ff2v4l(enum AVPixelFormat pix_fmt, enum AVCodecID codec_id) -{ - int i; - - for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) { - if ((codec_id == AV_CODEC_ID_NONE || - fmt_conversion_table[i].codec_id == codec_id) && - (pix_fmt == AV_PIX_FMT_NONE || - fmt_conversion_table[i].ff_fmt == pix_fmt)) { - return fmt_conversion_table[i].v4l2_fmt; - } - } - - return 0; -} - -static enum AVPixelFormat fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id) -{ - int i; - - for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) { - if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt && - fmt_conversion_table[i].codec_id == codec_id) { - return fmt_conversion_table[i].ff_fmt; - } - } - - return AV_PIX_FMT_NONE; -} - -static enum AVCodecID fmt_v4l2codec(uint32_t v4l2_fmt) -{ - int i; - - for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) { - if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt) { - return fmt_conversion_table[i].codec_id; - } - } - - return AV_CODEC_ID_NONE; -} - #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE -static void list_framesizes(AVFormatContext *ctx, int fd, uint32_t pixelformat) +static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat) { + const struct video_data *s = ctx->priv_data; struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat }; - while(!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) { + while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) { switch (vfse.type) { case V4L2_FRMSIZE_TYPE_DISCRETE: av_log(ctx, AV_LOG_INFO, " %ux%u", @@ -294,26 +264,27 @@ static void list_framesizes(AVFormatContext *ctx, int fd, uint32_t pixelformat) } #endif -static void list_formats(AVFormatContext *ctx, int fd, int type) +static void list_formats(AVFormatContext *ctx, int type) { + const struct video_data *s = ctx->priv_data; struct v4l2_fmtdesc vfd = { .type = 
V4L2_BUF_TYPE_VIDEO_CAPTURE }; - while(!ioctl(fd, VIDIOC_ENUM_FMT, &vfd)) { - enum AVCodecID codec_id = fmt_v4l2codec(vfd.pixelformat); - enum AVPixelFormat pix_fmt = fmt_v4l2ff(vfd.pixelformat, codec_id); + while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) { + enum AVCodecID codec_id = avpriv_fmt_v4l2codec(vfd.pixelformat); + enum AVPixelFormat pix_fmt = avpriv_fmt_v4l2ff(vfd.pixelformat, codec_id); vfd.index++; if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) && type & V4L_RAWFORMATS) { const char *fmt_name = av_get_pix_fmt_name(pix_fmt); - av_log(ctx, AV_LOG_INFO, "R : %9s : %20s :", + av_log(ctx, AV_LOG_INFO, "Raw : %9s : %20s :", fmt_name ? fmt_name : "Unsupported", vfd.description); } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED && type & V4L_COMPFORMATS) { - AVCodec *codec = avcodec_find_encoder(codec_id); - av_log(ctx, AV_LOG_INFO, "C : %9s : %20s :", + AVCodec *codec = avcodec_find_decoder(codec_id); + av_log(ctx, AV_LOG_INFO, "Compressed: %9s : %20s :", codec ? codec->name : "Unsupported", vfd.description); } else { @@ -321,18 +292,40 @@ static void list_formats(AVFormatContext *ctx, int fd, int type) } #ifdef V4L2_FMT_FLAG_EMULATED - if (vfd.flags & V4L2_FMT_FLAG_EMULATED) { - av_log(ctx, AV_LOG_WARNING, "%s", "Emulated"); - continue; - } + if (vfd.flags & V4L2_FMT_FLAG_EMULATED) + av_log(ctx, AV_LOG_INFO, " Emulated :"); #endif #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE - list_framesizes(ctx, fd, vfd.pixelformat); + list_framesizes(ctx, vfd.pixelformat); #endif av_log(ctx, AV_LOG_INFO, "\n"); } } +static void list_standards(AVFormatContext *ctx) +{ + int ret; + struct video_data *s = ctx->priv_data; + struct v4l2_standard standard; + + if (s->std_id == 0) + return; + + for (standard.index = 0; ; standard.index++) { + if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) { + ret = AVERROR(errno); + if (ret == AVERROR(EINVAL)) { + break; + } else { + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret)); + return; + } + } + av_log(ctx, 
AV_LOG_INFO, "%2d, %16"PRIx64", %s\n", + standard.index, (uint64_t)standard.id, standard.name); + } +} + static int mmap_init(AVFormatContext *ctx) { int i, res; @@ -343,34 +336,26 @@ static int mmap_init(AVFormatContext *ctx) .memory = V4L2_MEMORY_MMAP }; - res = ioctl(s->fd, VIDIOC_REQBUFS, &req); - if (res < 0) { - if (errno == EINVAL) { - av_log(ctx, AV_LOG_ERROR, "Device does not support mmap\n"); - } else { - av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS)\n"); - } - - return AVERROR(errno); + if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res)); + return res; } if (req.count < 2) { av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n"); - return AVERROR(ENOMEM); } s->buffers = req.count; - s->buf_start = av_malloc(sizeof(void *) * s->buffers); + s->buf_start = av_malloc_array(s->buffers, sizeof(void *)); if (!s->buf_start) { av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n"); - return AVERROR(ENOMEM); } - s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers); + s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int)); if (!s->buf_len) { av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n"); av_free(s->buf_start); - return AVERROR(ENOMEM); } @@ -380,30 +365,27 @@ static int mmap_init(AVFormatContext *ctx) .index = i, .memory = V4L2_MEMORY_MMAP }; - - res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf); - if (res < 0) { - av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n"); - - return AVERROR(errno); + if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res)); + return res; } s->buf_len[i] = buf.length; if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) { av_log(ctx, AV_LOG_ERROR, - "Buffer len [%d] = %d != %d\n", + "buf_len[%d] = %d < expected frame size %d\n", i, s->buf_len[i], s->frame_size); - - return -1; + return AVERROR(ENOMEM); } - 
s->buf_start[i] = mmap(NULL, buf.length, + s->buf_start[i] = v4l2_mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset); if (s->buf_start[i] == MAP_FAILED) { - av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", strerror(errno)); - - return AVERROR(errno); + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res)); + return res; } } @@ -417,24 +399,93 @@ static void dummy_release_buffer(AVPacket *pkt) } #endif +static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf) +{ + int res = 0; + + if (v4l2_ioctl(s->fd, VIDIOC_QBUF, buf) < 0) { + res = AVERROR(errno); + av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res)); + } else { + avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1); + } + + return res; +} + static void mmap_release_buffer(void *opaque, uint8_t *data) { struct v4l2_buffer buf = { 0 }; - int res, fd; struct buff_data *buf_descriptor = opaque; struct video_data *s = buf_descriptor->s; buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = buf_descriptor->index; - fd = buf_descriptor->fd; av_free(buf_descriptor); - res = ioctl(fd, VIDIOC_QBUF, &buf); - if (res < 0) - av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", - strerror(errno)); - avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1); + enqueue_buffer(s, &buf); +} + +#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC) +static int64_t av_gettime_monotonic(void) +{ + return av_gettime_relative(); +} +#endif + +static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts) +{ + struct video_data *s = ctx->priv_data; + int64_t now; + + now = av_gettime(); + if (s->ts_mode == V4L_TS_ABS && + ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) { + av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n"); + s->ts_mode = V4L_TS_CONVERT_READY; + return 0; + } +#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC) + if (ctx->streams[0]->avg_frame_rate.num) { + now = 
av_gettime_monotonic(); + if (s->ts_mode == V4L_TS_MONO2ABS || + (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) { + AVRational tb = {AV_TIME_BASE, 1}; + int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate); + av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n"); + /* microseconds instead of seconds, MHz instead of Hz */ + s->timefilter = ff_timefilter_new(1, period, 1.0E-6); + if (!s->timefilter) + return AVERROR(ENOMEM); + s->ts_mode = V4L_TS_CONVERT_READY; + return 0; + } + } +#endif + av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n"); + return AVERROR(EIO); +} + +static int convert_timestamp(AVFormatContext *ctx, int64_t *ts) +{ + struct video_data *s = ctx->priv_data; + + if (s->ts_mode) { + int r = init_convert_timestamp(ctx, *ts); + if (r < 0) + return r; + } +#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC) + if (s->timefilter) { + int64_t nowa = av_gettime(); + int64_t nowm = av_gettime_monotonic(); + ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m); + s->last_time_m = nowm; + *ts = ff_timefilter_eval(s->timefilter, *ts - nowm); + } +#endif + return 0; } static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt) @@ -444,28 +495,18 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt) .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, .memory = V4L2_MEMORY_MMAP }; - struct pollfd p = { .fd = s->fd, .events = POLLIN }; int res; - res = poll(&p, 1, s->timeout); - if (res < 0) - return AVERROR(errno); - - if (!(p.revents & (POLLIN | POLLERR | POLLHUP))) - return AVERROR(EAGAIN); - /* FIXME: Some special treatment might be needed in case of loss of signal... 
*/ - while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR)); + while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR)); if (res < 0) { if (errno == EAGAIN) { pkt->size = 0; - return AVERROR(EAGAIN); } - av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", - strerror(errno)); - - return AVERROR(errno); + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", av_err2str(res)); + return res; } if (buf.index >= s->buffers) { @@ -476,11 +517,17 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt) // always keep at least one buffer queued av_assert0(avpriv_atomic_int_get(&s->buffers_queued) >= 1); + /* CPIA is a compressed format and we don't know the exact number of bytes + * used by a frame, so set it here as the driver announces it. + */ + if (ctx->video_codec_id == AV_CODEC_ID_CPIA) + s->frame_size = buf.bytesused; + if (s->frame_size > 0 && buf.bytesused != s->frame_size) { av_log(ctx, AV_LOG_ERROR, "The v4l2 frame is %d bytes, but %d bytes are expected\n", buf.bytesused, s->frame_size); - + enqueue_buffer(s, &buf); return AVERROR_INVALIDDATA; } @@ -490,17 +537,16 @@ static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt) res = av_new_packet(pkt, buf.bytesused); if (res < 0) { av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n"); + enqueue_buffer(s, &buf); return res; } memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused); - res = ioctl(s->fd, VIDIOC_QBUF, &buf); - if (res < 0) { - av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF)\n"); + res = enqueue_buffer(s, &buf); + if (res) { av_free_packet(pkt); - return AVERROR(errno); + return res; } - avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1); } else { struct buff_data *buf_descriptor; @@ -518,22 +564,24 @@ FF_ENABLE_DEPRECATION_WARNINGS * allocate a buffer for memcpying into it */ av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n"); - res = ioctl(s->fd, VIDIOC_QBUF, &buf); + enqueue_buffer(s, &buf); 
return AVERROR(ENOMEM); } - buf_descriptor->fd = s->fd; buf_descriptor->index = buf.index; buf_descriptor->s = s; pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer, buf_descriptor, 0); if (!pkt->buf) { + av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n"); + enqueue_buffer(s, &buf); av_freep(&buf_descriptor); return AVERROR(ENOMEM); } } pkt->pts = buf.timestamp.tv_sec * INT64_C(1000000) + buf.timestamp.tv_usec; + convert_timestamp(ctx, &pkt->pts); return s->buf_len[buf.index]; } @@ -551,23 +599,19 @@ static int mmap_start(AVFormatContext *ctx) .memory = V4L2_MEMORY_MMAP }; - res = ioctl(s->fd, VIDIOC_QBUF, &buf); - if (res < 0) { - av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", - strerror(errno)); - - return AVERROR(errno); + if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res)); + return res; } } s->buffers_queued = s->buffers; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - res = ioctl(s->fd, VIDIOC_STREAMON, &type); - if (res < 0) { - av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", - strerror(errno)); - - return AVERROR(errno); + if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", av_err2str(res)); + return res; } return 0; @@ -582,264 +626,348 @@ static void mmap_close(struct video_data *s) /* We do not check for the result, because we could * not do anything about it anyway... 
*/ - ioctl(s->fd, VIDIOC_STREAMOFF, &type); + v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type); for (i = 0; i < s->buffers; i++) { - munmap(s->buf_start[i], s->buf_len[i]); + v4l2_munmap(s->buf_start[i], s->buf_len[i]); } av_free(s->buf_start); av_free(s->buf_len); } -static int v4l2_set_parameters(AVFormatContext *s1) +static int v4l2_set_parameters(AVFormatContext *ctx) { - struct video_data *s = s1->priv_data; - struct v4l2_input input = { 0 }; + struct video_data *s = ctx->priv_data; struct v4l2_standard standard = { 0 }; struct v4l2_streamparm streamparm = { 0 }; - struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe; + struct v4l2_fract *tpf; AVRational framerate_q = { 0 }; int i, ret; - streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (s->framerate && (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) { - av_log(s1, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", + av_log(ctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", s->framerate); return ret; } - /* set tv video input */ - input.index = s->channel; - if (ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) { - av_log(s1, AV_LOG_ERROR, "The V4L2 driver ioctl enum input failed:\n"); - return AVERROR(EIO); - } + if (s->standard) { + if (s->std_id) { + ret = 0; + av_log(ctx, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard); + /* set tv standard */ + for (i = 0; ; i++) { + standard.index = i; + if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) { + ret = AVERROR(errno); + break; + } + if (!av_strcasecmp(standard.name, s->standard)) + break; + } + if (ret < 0) { + av_log(ctx, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard); + return ret; + } - av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set input_id: %d, input: %s\n", - s->channel, input.name); - if (ioctl(s->fd, VIDIOC_S_INPUT, &input.index) < 0) { - av_log(s1, AV_LOG_ERROR, - "The V4L2 driver ioctl set input(%d) failed\n", - s->channel); - return AVERROR(EIO); + if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 
0) { + ret = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret)); + return ret; + } + } else { + av_log(ctx, AV_LOG_WARNING, + "This device does not support any standard\n"); + } } - if (s->standard) { - av_log(s1, AV_LOG_DEBUG, "The V4L2 driver set standard: %s\n", - s->standard); - /* set tv standard */ - for(i=0;;i++) { + /* get standard */ + if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) { + tpf = &standard.frameperiod; + for (i = 0; ; i++) { standard.index = i; - if (ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) { - av_log(s1, AV_LOG_ERROR, - "The V4L2 driver ioctl set standard(%s) failed\n", - s->standard); - return AVERROR(EIO); + if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) { + ret = AVERROR(errno); + if (ret == AVERROR(EINVAL) +#ifdef ENODATA + || ret == AVERROR(ENODATA) +#endif + ) { + tpf = &streamparm.parm.capture.timeperframe; + break; + } + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret)); + return ret; } - - if (!av_strcasecmp(standard.name, s->standard)) { + if (standard.id == s->std_id) { + av_log(ctx, AV_LOG_DEBUG, + "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n", + standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator); break; } } + } else { + tpf = &streamparm.parm.capture.timeperframe; + } - av_log(s1, AV_LOG_DEBUG, - "The V4L2 driver set standard: %s, id: %"PRIu64"\n", - s->standard, (uint64_t)standard.id); - if (ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) { - av_log(s1, AV_LOG_ERROR, - "The V4L2 driver ioctl set standard(%s) failed\n", - s->standard); - return AVERROR(EIO); - } + streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) { + ret = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret)); + return ret; } if (framerate_q.num && framerate_q.den) { - av_log(s1, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n", - framerate_q.den, 
framerate_q.num); - tpf->numerator = framerate_q.den; - tpf->denominator = framerate_q.num; - - if (ioctl(s->fd, VIDIOC_S_PARM, &streamparm) != 0) { - av_log(s1, AV_LOG_ERROR, - "ioctl set time per frame(%d/%d) failed\n", + if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { + tpf = &streamparm.parm.capture.timeperframe; + + av_log(ctx, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n", framerate_q.den, framerate_q.num); - return AVERROR(EIO); - } + tpf->numerator = framerate_q.den; + tpf->denominator = framerate_q.num; - if (framerate_q.num != tpf->denominator || - framerate_q.den != tpf->numerator) { - av_log(s1, AV_LOG_INFO, - "The driver changed the time per frame from " - "%d/%d to %d/%d\n", - framerate_q.den, framerate_q.num, - tpf->numerator, tpf->denominator); - } - } else { - if (ioctl(s->fd, VIDIOC_G_PARM, &streamparm) != 0) { - av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n", - strerror(errno)); - return AVERROR(errno); + if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) { + ret = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n", av_err2str(ret)); + return ret; + } + + if (framerate_q.num != tpf->denominator || + framerate_q.den != tpf->numerator) { + av_log(ctx, AV_LOG_INFO, + "The driver changed the time per frame from " + "%d/%d to %d/%d\n", + framerate_q.den, framerate_q.num, + tpf->numerator, tpf->denominator); + } + } else { + av_log(ctx, AV_LOG_WARNING, + "The driver does not allow to change time per frame\n"); } } - s1->streams[0]->avg_frame_rate.num = tpf->denominator; - s1->streams[0]->avg_frame_rate.den = tpf->numerator; - - s->timeout = 100 + - av_rescale_q(1, s1->streams[0]->avg_frame_rate, - (AVRational){1, 1000}); + if (tpf->denominator > 0 && tpf->numerator > 0) { + ctx->streams[0]->avg_frame_rate.num = tpf->denominator; + ctx->streams[0]->avg_frame_rate.den = tpf->numerator; + ctx->streams[0]->r_frame_rate = ctx->streams[0]->avg_frame_rate; + } else + av_log(ctx, AV_LOG_WARNING, "Time per 
frame unknown\n"); return 0; } -static uint32_t device_try_init(AVFormatContext *s1, - enum AVPixelFormat pix_fmt, - int *width, - int *height, - enum AVCodecID *codec_id) +static int device_try_init(AVFormatContext *ctx, + enum AVPixelFormat pix_fmt, + int *width, + int *height, + uint32_t *desired_format, + enum AVCodecID *codec_id) { - uint32_t desired_format = fmt_ff2v4l(pix_fmt, s1->video_codec_id); - - if (desired_format == 0 || - device_init(s1, width, height, desired_format) < 0) { - int i; - - desired_format = 0; - for (i = 0; i<FF_ARRAY_ELEMS(fmt_conversion_table); i++) { - if (s1->video_codec_id == AV_CODEC_ID_NONE || - fmt_conversion_table[i].codec_id == s1->video_codec_id) { - desired_format = fmt_conversion_table[i].v4l2_fmt; - if (device_init(s1, width, height, desired_format) >= 0) { + int ret, i; + + *desired_format = avpriv_fmt_ff2v4l(pix_fmt, ctx->video_codec_id); + + if (*desired_format) { + ret = device_init(ctx, width, height, *desired_format); + if (ret < 0) { + *desired_format = 0; + if (ret != AVERROR(EINVAL)) + return ret; + } + } + + if (!*desired_format) { + for (i = 0; avpriv_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) { + if (ctx->video_codec_id == AV_CODEC_ID_NONE || + avpriv_fmt_conversion_table[i].codec_id == ctx->video_codec_id) { + av_log(ctx, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n", + avcodec_get_name(avpriv_fmt_conversion_table[i].codec_id), + (char *)av_x_if_null(av_get_pix_fmt_name(avpriv_fmt_conversion_table[i].ff_fmt), "none")); + + *desired_format = avpriv_fmt_conversion_table[i].v4l2_fmt; + ret = device_init(ctx, width, height, *desired_format); + if (ret >= 0) break; - } - desired_format = 0; + else if (ret != AVERROR(EINVAL)) + return ret; + *desired_format = 0; } } - } - if (desired_format != 0) { - *codec_id = fmt_v4l2codec(desired_format); - assert(*codec_id != AV_CODEC_ID_NONE); + if (*desired_format == 0) { + av_log(ctx, AV_LOG_ERROR, "Cannot find a proper format for " + "codec '%s' (id 
%d), pixel format '%s' (id %d)\n", + avcodec_get_name(ctx->video_codec_id), ctx->video_codec_id, + (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt); + ret = AVERROR(EINVAL); + } } - return desired_format; + *codec_id = avpriv_fmt_v4l2codec(*desired_format); + av_assert0(*codec_id != AV_CODEC_ID_NONE); + return ret; } -static int v4l2_read_header(AVFormatContext *s1) +static int v4l2_read_header(AVFormatContext *ctx) { - struct video_data *s = s1->priv_data; + struct video_data *s = ctx->priv_data; AVStream *st; int res = 0; uint32_t desired_format; - enum AVCodecID codec_id; + enum AVCodecID codec_id = AV_CODEC_ID_NONE; enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE; + struct v4l2_input input = { 0 }; - st = avformat_new_stream(s1, NULL); + st = avformat_new_stream(ctx, NULL); if (!st) return AVERROR(ENOMEM); - s->fd = device_open(s1); +#if CONFIG_LIBV4L2 + /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL + and errors will get sent to stderr */ + if (s->use_libv4l2) + v4l2_log_file = fopen("/dev/null", "w"); +#endif + + s->fd = device_open(ctx); if (s->fd < 0) return s->fd; + if (s->channel != -1) { + /* set video input */ + av_log(ctx, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel); + if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res)); + goto fail; + } + } else { + /* get current video input */ + if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res)); + goto fail; + } + } + + /* enum input */ + input.index = s->channel; + if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res)); + goto fail; + } + s->std_id = input.std; + av_log(ctx, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: 
%"PRIx64"\n", + s->channel, input.name, (uint64_t)input.std); + if (s->list_format) { - list_formats(s1, s->fd, s->list_format); - return AVERROR_EXIT; + list_formats(ctx, s->list_format); + res = AVERROR_EXIT; + goto fail; + } + + if (s->list_standard) { + list_standards(ctx); + res = AVERROR_EXIT; + goto fail; } avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */ - if (s->video_size && - (res = av_parse_video_size(&s->width, &s->height, s->video_size)) < 0) { - av_log(s1, AV_LOG_ERROR, "Could not parse video size '%s'.\n", - s->video_size); - return res; - } + if ((res = v4l2_set_parameters(ctx)) < 0) + goto fail; if (s->pixel_format) { AVCodec *codec = avcodec_find_decoder_by_name(s->pixel_format); if (codec) - s1->video_codec_id = codec->id; + ctx->video_codec_id = codec->id; pix_fmt = av_get_pix_fmt(s->pixel_format); if (pix_fmt == AV_PIX_FMT_NONE && !codec) { - av_log(s1, AV_LOG_ERROR, "No such input format: %s.\n", + av_log(ctx, AV_LOG_ERROR, "No such input format: %s.\n", s->pixel_format); - return AVERROR(EINVAL); + res = AVERROR(EINVAL); + goto fail; } } if (!s->width && !s->height) { - struct v4l2_format fmt; + struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE }; - av_log(s1, AV_LOG_VERBOSE, + av_log(ctx, AV_LOG_VERBOSE, "Querying the device for the current frame size\n"); - fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) { - av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", - strerror(errno)); - return AVERROR(errno); + if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) { + res = AVERROR(errno); + av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", av_err2str(res)); + goto fail; } s->width = fmt.fmt.pix.width; s->height = fmt.fmt.pix.height; - av_log(s1, AV_LOG_VERBOSE, + av_log(ctx, AV_LOG_VERBOSE, "Setting frame size to %dx%d\n", s->width, s->height); } - desired_format = device_try_init(s1, pix_fmt, &s->width, &s->height, - &codec_id); - if (desired_format == 0) { - av_log(s1, 
AV_LOG_ERROR, "Cannot find a proper format for " - "codec_id %d, pix_fmt %d.\n", s1->video_codec_id, pix_fmt); - close(s->fd); + res = device_try_init(ctx, pix_fmt, &s->width, &s->height, &desired_format, &codec_id); + if (res < 0) + goto fail; - return AVERROR(EIO); - } + /* If no pixel_format was specified, the codec_id was not known up + * until now. Set video_codec_id in the context, as codec_id will + * not be available outside this function + */ + if (codec_id != AV_CODEC_ID_NONE && ctx->video_codec_id == AV_CODEC_ID_NONE) + ctx->video_codec_id = codec_id; - if ((res = av_image_check_size(s->width, s->height, 0, s1) < 0)) - return res; + if ((res = av_image_check_size(s->width, s->height, 0, ctx)) < 0) + goto fail; s->frame_format = desired_format; - if ((res = v4l2_set_parameters(s1) < 0)) - return res; - - st->codec->pix_fmt = fmt_v4l2ff(desired_format, codec_id); + st->codec->pix_fmt = avpriv_fmt_v4l2ff(desired_format, codec_id); s->frame_size = avpicture_get_size(st->codec->pix_fmt, s->width, s->height); - if ((res = mmap_init(s1)) || - (res = mmap_start(s1)) < 0) { - close(s->fd); - return res; - } + if ((res = mmap_init(ctx)) || + (res = mmap_start(ctx)) < 0) + goto fail; - s->top_field_first = first_field(s->fd); + s->top_field_first = first_field(s); st->codec->codec_type = AVMEDIA_TYPE_VIDEO; st->codec->codec_id = codec_id; if (codec_id == AV_CODEC_ID_RAWVIDEO) st->codec->codec_tag = avcodec_pix_fmt_to_codec_tag(st->codec->pix_fmt); + else if (codec_id == AV_CODEC_ID_H264) { + st->need_parsing = AVSTREAM_PARSE_HEADERS; + } + if (desired_format == V4L2_PIX_FMT_YVU420) + st->codec->codec_tag = MKTAG('Y', 'V', '1', '2'); + else if (desired_format == V4L2_PIX_FMT_YVU410) + st->codec->codec_tag = MKTAG('Y', 'V', 'U', '9'); st->codec->width = s->width; st->codec->height = s->height; - st->codec->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8; + if (st->avg_frame_rate.den) + st->codec->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8; 
return 0; + +fail: + v4l2_close(s->fd); + return res; } -static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt) +static int v4l2_read_packet(AVFormatContext *ctx, AVPacket *pkt) { - struct video_data *s = s1->priv_data; - AVFrame *frame = s1->streams[0]->codec->coded_frame; + struct video_data *s = ctx->priv_data; + AVFrame *frame = ctx->streams[0]->codec->coded_frame; int res; av_init_packet(pkt); - if ((res = mmap_read_frame(s1, pkt)) < 0) { + if ((res = mmap_read_frame(ctx, pkt)) < 0) { return res; } @@ -851,33 +979,45 @@ static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt) return pkt->size; } -static int v4l2_read_close(AVFormatContext *s1) +static int v4l2_read_close(AVFormatContext *ctx) { - struct video_data *s = s1->priv_data; + struct video_data *s = ctx->priv_data; if (avpriv_atomic_int_get(&s->buffers_queued) != s->buffers) - av_log(s1, AV_LOG_WARNING, "Some buffers are still owned by the caller on " + av_log(ctx, AV_LOG_WARNING, "Some buffers are still owned by the caller on " "close.\n"); mmap_close(s); - close(s->fd); + v4l2_close(s->fd); return 0; } #define OFFSET(x) offsetof(struct video_data, x) #define DEC AV_OPT_FLAG_DECODING_PARAM + static const AVOption options[] = { - { "standard", "TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC }, - { "channel", "TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC }, - { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, - { "pixel_format", "Preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, - { "input_format", "Preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, - { "framerate", "", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, - { "list_formats", 
"List available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, "list_formats" }, - { "all", "Show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" }, - { "raw", "Show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, "list_formats" }, - { "compressed", "Show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" }, + { "standard", "set TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC }, + { "channel", "set TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = -1 }, -1, INT_MAX, DEC }, + { "video_size", "set frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC }, + { "pixel_format", "set preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, + { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, + { "framerate", "set frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC }, + + { "list_formats", "list available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, "list_formats" }, + { "all", "show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" }, + { "raw", "show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, "list_formats" }, + { "compressed", "show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" }, + + { "list_standards", "list supported standards and exit", OFFSET(list_standard), AV_OPT_TYPE_INT, {.i64 = 0 
}, 0, 1, DEC, "list_standards" }, + { "all", "show all supported standards", OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, "list_standards" }, + + { "timestamps", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" }, + { "ts", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" }, + { "default", "use timestamps from the kernel", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_DEFAULT }, 0, 2, DEC, "timestamps" }, + { "abs", "use absolute timestamps (wall clock)", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_ABS }, 0, 2, DEC, "timestamps" }, + { "mono2abs", "force conversion from monotonic to absolute timestamps", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" }, + { "use_libv4l2", "use libv4l2 (v4l-utils) conversion functions", OFFSET(use_libv4l2), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 1, DEC }, { NULL }, }; @@ -886,10 +1026,11 @@ static const AVClass v4l2_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, }; AVInputFormat ff_v4l2_demuxer = { - .name = "video4linux2", + .name = "video4linux2,v4l2", .long_name = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"), .priv_data_size = sizeof(struct video_data), .read_header = v4l2_read_header, diff --git a/libavdevice/v4l2enc.c b/libavdevice/v4l2enc.c new file mode 100644 index 0000000..c9f8d92 --- /dev/null +++ b/libavdevice/v4l2enc.c @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2013 Clément Bœsch + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "v4l2-common.h" +#include "avdevice.h" + +typedef struct { + AVClass *class; + int fd; +} V4L2Context; + +static av_cold int write_header(AVFormatContext *s1) +{ + int res = 0, flags = O_RDWR; + struct v4l2_format fmt = { + .type = V4L2_BUF_TYPE_VIDEO_OUTPUT + }; + V4L2Context *s = s1->priv_data; + AVCodecContext *enc_ctx; + uint32_t v4l2_pixfmt; + + if (s1->flags & AVFMT_FLAG_NONBLOCK) + flags |= O_NONBLOCK; + + s->fd = open(s1->filename, flags); + if (s->fd < 0) { + res = AVERROR(errno); + av_log(s1, AV_LOG_ERROR, "Unable to open V4L2 device '%s'\n", s1->filename); + return res; + } + + if (s1->nb_streams != 1 || + s1->streams[0]->codec->codec_type != AVMEDIA_TYPE_VIDEO || + s1->streams[0]->codec->codec_id != AV_CODEC_ID_RAWVIDEO) { + av_log(s1, AV_LOG_ERROR, + "V4L2 output device supports only a single raw video stream\n"); + return AVERROR(EINVAL); + } + + enc_ctx = s1->streams[0]->codec; + + v4l2_pixfmt = avpriv_fmt_ff2v4l(enc_ctx->pix_fmt, AV_CODEC_ID_RAWVIDEO); + if (!v4l2_pixfmt) { // XXX: try to force them one by one? 
+ av_log(s1, AV_LOG_ERROR, "Unknown V4L2 pixel format equivalent for %s\n", + av_get_pix_fmt_name(enc_ctx->pix_fmt)); + return AVERROR(EINVAL); + } + + if (ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) { + res = AVERROR(errno); + av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", av_err2str(res)); + return res; + } + + fmt.fmt.pix.width = enc_ctx->width; + fmt.fmt.pix.height = enc_ctx->height; + fmt.fmt.pix.pixelformat = v4l2_pixfmt; + fmt.fmt.pix.sizeimage = av_image_get_buffer_size(enc_ctx->pix_fmt, enc_ctx->width, enc_ctx->height, 1); + + if (ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0) { + res = AVERROR(errno); + av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_FMT): %s\n", av_err2str(res)); + return res; + } + + return res; +} + +static int write_packet(AVFormatContext *s1, AVPacket *pkt) +{ + const V4L2Context *s = s1->priv_data; + if (write(s->fd, pkt->data, pkt->size) == -1) + return AVERROR(errno); + return 0; +} + +static int write_trailer(AVFormatContext *s1) +{ + const V4L2Context *s = s1->priv_data; + close(s->fd); + return 0; +} + +static const AVClass v4l2_class = { + .class_name = "V4L2 outdev", + .item_name = av_default_item_name, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_v4l2_muxer = { + .name = "v4l2", + .long_name = NULL_IF_CONFIG_SMALL("Video4Linux2 output device"), + .priv_data_size = sizeof(V4L2Context), + .audio_codec = AV_CODEC_ID_NONE, + .video_codec = AV_CODEC_ID_RAWVIDEO, + .write_header = write_header, + .write_packet = write_packet, + .write_trailer = write_trailer, + .flags = AVFMT_NOFILE, + .priv_class = &v4l2_class, +}; diff --git a/libavdevice/version.h b/libavdevice/version.h index 3d82c97..ca5b87a 100644 --- a/libavdevice/version.h +++ b/libavdevice/version.h @@ -1,18 +1,18 @@ /* - * This file is part of Libav. + * This file is part of FFmpeg. 
* - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -27,9 +27,9 @@ #include "libavutil/version.h" -#define LIBAVDEVICE_VERSION_MAJOR 55 -#define LIBAVDEVICE_VERSION_MINOR 0 -#define LIBAVDEVICE_VERSION_MICRO 0 +#define LIBAVDEVICE_VERSION_MAJOR 56 +#define LIBAVDEVICE_VERSION_MINOR 1 +#define LIBAVDEVICE_VERSION_MICRO 100 #define LIBAVDEVICE_VERSION_INT AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, \ LIBAVDEVICE_VERSION_MINOR, \ @@ -39,6 +39,8 @@ LIBAVDEVICE_VERSION_MICRO) #define LIBAVDEVICE_BUILD LIBAVDEVICE_VERSION_INT +#define LIBAVDEVICE_IDENT "Lavd" AV_STRINGIFY(LIBAVDEVICE_VERSION) + /** * FF_API_* defines may be placed below to indicate public API that will be * dropped at a future version bump. The defines themselves are not part of diff --git a/libavdevice/vfwcap.c b/libavdevice/vfwcap.c index b47de1b..00fb48b 100644 --- a/libavdevice/vfwcap.c +++ b/libavdevice/vfwcap.c @@ -2,30 +2,30 @@ * VFW capture interface * Copyright (c) 2006-2008 Ramiro Polla * - * This file is part of Libav. + * This file is part of FFmpeg. 
* - * Libav is free software; you can redistribute it and/or + * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public - * License along with Libav; if not, write to the Free Software + * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include "libavformat/avformat.h" #include "libavformat/internal.h" #include "libavutil/log.h" #include "libavutil/opt.h" #include "libavutil/parseutils.h" #include <windows.h> #include <vfw.h> +#include "avdevice.h" /* Some obsolete versions of MinGW32 before 4.0.0 lack this. 
*/ #ifndef HWND_MESSAGE @@ -154,7 +154,7 @@ static void dump_bih(AVFormatContext *s, BITMAPINFOHEADER *bih) static int shall_we_drop(AVFormatContext *s) { struct vfw_ctx *ctx = s->priv_data; - const uint8_t dropscore[] = {62, 75, 87, 100}; + static const uint8_t dropscore[] = {62, 75, 87, 100}; const int ndropscores = FF_ARRAY_ELEMS(dropscore); unsigned int buffer_fullness = (ctx->curbufsize*100)/s->max_picture_buffer; @@ -242,7 +242,7 @@ static int vfw_read_header(AVFormatContext *s) AVStream *st; int devnum; int bisize; - BITMAPINFO *bi; + BITMAPINFO *bi = NULL; CAPTUREPARMS cparms; DWORD biCompression; WORD biBitCount; @@ -288,7 +288,7 @@ static int vfw_read_header(AVFormatContext *s) (LPARAM) videostream_cb); if(!ret) { av_log(s, AV_LOG_ERROR, "Could not set video stream callback.\n"); - goto fail_io; + goto fail; } SetWindowLongPtr(ctx->hwnd, GWLP_USERDATA, (LONG_PTR) s); @@ -302,7 +302,7 @@ static int vfw_read_header(AVFormatContext *s) /* Set video format */ bisize = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, 0, 0); if(!bisize) - goto fail_io; + goto fail; bi = av_malloc(bisize); if(!bi) { vfw_read_close(s); @@ -310,16 +310,21 @@ static int vfw_read_header(AVFormatContext *s) } ret = SendMessage(ctx->hwnd, WM_CAP_GET_VIDEOFORMAT, bisize, (LPARAM) bi); if(!ret) - goto fail_bi; + goto fail; dump_bih(s, &bi->bmiHeader); + ret = av_parse_video_rate(&framerate_q, ctx->framerate); + if (ret < 0) { + av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", ctx->framerate); + goto fail; + } if (ctx->video_size) { ret = av_parse_video_size(&bi->bmiHeader.biWidth, &bi->bmiHeader.biHeight, ctx->video_size); if (ret < 0) { av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n"); - goto fail_bi; + goto fail; } } @@ -338,19 +343,17 @@ static int vfw_read_header(AVFormatContext *s) ret = SendMessage(ctx->hwnd, WM_CAP_SET_VIDEOFORMAT, bisize, (LPARAM) bi); if(!ret) { av_log(s, AV_LOG_ERROR, "Could not set Video Format.\n"); - goto fail_bi; + goto fail; } 
biCompression = bi->bmiHeader.biCompression; biBitCount = bi->bmiHeader.biBitCount; - av_free(bi); - /* Set sequence setup */ ret = SendMessage(ctx->hwnd, WM_CAP_GET_SEQUENCE_SETUP, sizeof(cparms), (LPARAM) &cparms); if(!ret) - goto fail_io; + goto fail; dump_captureparms(s, &cparms); @@ -365,10 +368,10 @@ static int vfw_read_header(AVFormatContext *s) ret = SendMessage(ctx->hwnd, WM_CAP_SET_SEQUENCE_SETUP, sizeof(cparms), (LPARAM) &cparms); if(!ret) - goto fail_io; + goto fail; codec = st->codec; - codec->time_base = (AVRational){framerate_q.den, framerate_q.num}; + codec->time_base = av_inv_q(framerate_q); codec->codec_type = AVMEDIA_TYPE_VIDEO; codec->width = bi->bmiHeader.biWidth; codec->height = bi->bmiHeader.biHeight; @@ -394,31 +397,31 @@ static int vfw_read_header(AVFormatContext *s) } } + av_freep(&bi); + avpriv_set_pts_info(st, 32, 1, 1000); ctx->mutex = CreateMutex(NULL, 0, NULL); if(!ctx->mutex) { av_log(s, AV_LOG_ERROR, "Could not create Mutex.\n" ); - goto fail_io; + goto fail; } ctx->event = CreateEvent(NULL, 1, 0, NULL); if(!ctx->event) { av_log(s, AV_LOG_ERROR, "Could not create Event.\n" ); - goto fail_io; + goto fail; } ret = SendMessage(ctx->hwnd, WM_CAP_SEQUENCE_NOFILE, 0, 0); if(!ret) { av_log(s, AV_LOG_ERROR, "Could not start capture sequence.\n" ); - goto fail_io; + goto fail; } return 0; -fail_bi: - av_free(bi); - -fail_io: +fail: + av_freep(&bi); vfw_read_close(s); return AVERROR(EIO); } @@ -465,6 +468,7 @@ static const AVClass vfw_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT }; AVInputFormat ff_vfwcap_demuxer = { diff --git a/libavdevice/x11grab.c b/libavdevice/x11grab.c index 1f91be9..9066ae2 100644 --- a/libavdevice/x11grab.c +++ b/libavdevice/x11grab.c @@ -1,9 +1,9 @@ /* * X11 video grab interface * - * This file is part of Libav. + * This file is part of FFmpeg. 
* - * Libav integration: + * FFmpeg integration: * Copyright (C) 2006 Clemens Fruhwirth <clemens@endorphin.org> * Edouard Gomez <ed.gomez@free.fr> * @@ -14,18 +14,18 @@ * Copyright (C) 1997-1998 Rasca, Berlin * 2003-2004 Karl H. Beckers, Frankfurt * - * Libav is free software; you can redistribute it and/or modify + * FFmpeg is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * - * Libav is distributed in the hope that it will be useful, + * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License - * along with Libav; if not, write to the Free Software + * along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ @@ -41,6 +41,7 @@ #include <time.h> #include <sys/shm.h> +#include <X11/cursorfont.h> #include <X11/X.h> #include <X11/Xlib.h> #include <X11/Xlibint.h> @@ -51,12 +52,13 @@ #include <X11/extensions/Xfixes.h> #include <X11/extensions/XShm.h> +#include "avdevice.h" + #include "libavutil/log.h" #include "libavutil/opt.h" #include "libavutil/parseutils.h" #include "libavutil/time.h" -#include "libavformat/avformat.h" #include "libavformat/internal.h" /** X11 device demuxer context */ @@ -66,9 +68,8 @@ typedef struct X11GrabContext { AVRational time_base; /**< Time base */ int64_t time_frame; /**< Current time */ - char *video_size; /**< String describing video size, set by a private option. 
*/ - int height; /**< Height of the grab frame */ int width; /**< Width of the grab frame */ + int height; /**< Height of the grab frame */ int x_off; /**< Horizontal top-left corner coordinate */ int y_off; /**< Vertical top-left corner coordinate */ @@ -79,8 +80,11 @@ typedef struct X11GrabContext { int draw_mouse; /**< Set by a private option. */ int follow_mouse; /**< Set by a private option. */ int show_region; /**< set by a private option. */ - char *framerate; /**< Set by a private option. */ + AVRational framerate; /**< Set by a private option. */ + int palette_changed; + uint32_t palette[256]; + Cursor c; Window region_win; /**< This is used by show_region option. */ } X11GrabContext; @@ -190,6 +194,8 @@ static int pixfmt_from_image(AVFormatContext *s, XImage *image, int *pix_fmt) image->blue_mask, image->bits_per_pixel); + *pix_fmt = AV_PIX_FMT_NONE; + switch (image->bits_per_pixel) { case 8: *pix_fmt = AV_PIX_FMT_PAL8; @@ -217,9 +223,14 @@ static int pixfmt_from_image(AVFormatContext *s, XImage *image, int *pix_fmt) } break; case 32: - *pix_fmt = AV_PIX_FMT_RGB32; + if (image->red_mask == 0xff0000 && + image->green_mask == 0x00ff00 && + image->blue_mask == 0x0000ff ) { + *pix_fmt = AV_PIX_FMT_0RGB32; + } break; - default: + } + if (*pix_fmt == AV_PIX_FMT_NONE) { av_log(s, AV_LOG_ERROR, "XImages with RGB mask 0x%.6lx 0x%.6lx 0x%.6lx and depth %i " "are currently not supported.\n", @@ -250,39 +261,34 @@ static int x11grab_read_header(AVFormatContext *s1) Display *dpy; AVStream *st = NULL; XImage *image; - int x_off = 0, y_off = 0, ret = 0, screen, use_shm; - char *param, *offset; - AVRational framerate; - - param = av_strdup(s1->filename); - if (!param) + int x_off = 0, y_off = 0, ret = 0, screen, use_shm = 0; + char *dpyname, *offset; + Colormap color_map; + XColor color[256]; + int i; + + dpyname = av_strdup(s1->filename); + if (!dpyname) goto out; - offset = strchr(param, '+'); + offset = strchr(dpyname, '+'); if (offset) { sscanf(offset, "%d,%d", 
&x_off, &y_off); - x11grab->draw_mouse = !strstr(offset, "nomouse"); + if (strstr(offset, "nomouse")) { + av_log(s1, AV_LOG_WARNING, + "'nomouse' specification in argument is deprecated: " + "use 'draw_mouse' option with value 0 instead\n"); + x11grab->draw_mouse = 0; + } *offset = 0; } - ret = av_parse_video_size(&x11grab->width, &x11grab->height, - x11grab->video_size); - if (ret < 0) { - av_log(s1, AV_LOG_ERROR, "Couldn't parse video size.\n"); - goto out; - } - - ret = av_parse_video_rate(&framerate, x11grab->framerate); - if (ret < 0) { - av_log(s1, AV_LOG_ERROR, "Could not parse framerate: %s.\n", - x11grab->framerate); - goto out; - } av_log(s1, AV_LOG_INFO, "device: %s -> display: %s x: %d y: %d width: %d height: %d\n", - s1->filename, param, x_off, y_off, x11grab->width, x11grab->height); + s1->filename, dpyname, x_off, y_off, x11grab->width, x11grab->height); - dpy = XOpenDisplay(param); + dpy = XOpenDisplay(dpyname); + av_freep(&dpyname); if (!dpy) { av_log(s1, AV_LOG_ERROR, "Could not open X display.\n"); ret = AVERROR(EIO); @@ -315,9 +321,11 @@ static int x11grab_read_header(AVFormatContext *s1) x_off, y_off); } - use_shm = XShmQueryExtension(dpy); - av_log(s1, AV_LOG_INFO, - "shared memory extension %sfound\n", use_shm ? "" : "not "); + if (x11grab->use_shm) { + use_shm = XShmQueryExtension(dpy); + av_log(s1, AV_LOG_INFO, + "shared memory extension %sfound\n", use_shm ? 
"" : "not "); + } if (use_shm && setup_shm(s1, dpy, &image) < 0) { av_log(s1, AV_LOG_WARNING, "Falling back to XGetImage\n"); @@ -339,7 +347,7 @@ static int x11grab_read_header(AVFormatContext *s1) x11grab->frame_size = x11grab->width * x11grab->height * image->bits_per_pixel / 8; x11grab->dpy = dpy; - x11grab->time_base = (AVRational) { framerate.den, framerate.num }; + x11grab->time_base = av_inv_q(x11grab->framerate); x11grab->time_frame = av_gettime() / av_q2d(x11grab->time_base); x11grab->x_off = x_off; x11grab->y_off = y_off; @@ -350,6 +358,19 @@ static int x11grab_read_header(AVFormatContext *s1) if (ret < 0) goto out; + if (st->codec->pix_fmt == AV_PIX_FMT_PAL8) { + color_map = DefaultColormap(dpy, screen); + for (i = 0; i < 256; ++i) + color[i].pixel = i; + XQueryColors(dpy, color_map, color, 256); + for (i = 0; i < 256; ++i) + x11grab->palette[i] = (color[i].red & 0xFF00) << 8 | + (color[i].green & 0xFF00) | + (color[i].blue & 0xFF00) >> 8; + x11grab->palette_changed = 1; + } + + st->codec->codec_type = AVMEDIA_TYPE_VIDEO; st->codec->codec_id = AV_CODEC_ID_RAWVIDEO; st->codec->width = x11grab->width; @@ -358,7 +379,7 @@ static int x11grab_read_header(AVFormatContext *s1) st->codec->bit_rate = x11grab->frame_size * 1 / av_q2d(x11grab->time_base) * 8; out: - av_free(param); + av_free(dpyname); return ret; } @@ -369,8 +390,9 @@ out: * @param s context used to retrieve original grabbing rectangle * coordinates */ -static void paint_mouse_pointer(XImage *image, X11GrabContext *s) +static void paint_mouse_pointer(XImage *image, AVFormatContext *s1) { + X11GrabContext *s = s1->priv_data; int x_off = s->x_off; int y_off = s->y_off; int width = s->width; @@ -386,14 +408,32 @@ static void paint_mouse_pointer(XImage *image, X11GrabContext *s) * Anyone who performs further investigation of the xlib API likely risks * permanent brain damage. 
*/ uint8_t *pix = image->data; + Window root; + XSetWindowAttributes attr; + Bool pointer_on_screen; + Window w; + int _; + + root = DefaultRootWindow(dpy); + pointer_on_screen = XQueryPointer(dpy, root, &w, &w, &_, &_, &_, &_, &_); + if (!pointer_on_screen) + return; /* Code doesn't currently support 16-bit or PAL8 */ if (image->bits_per_pixel != 24 && image->bits_per_pixel != 32) return; + if (!s->c) + s->c = XCreateFontCursor(dpy, XC_left_ptr); + attr.cursor = s->c; + XChangeWindowAttributes(dpy, root, CWCursor, &attr); + xcim = XFixesGetCursorImage(dpy); - if (!xcim) + if (!xcim) { + av_log(s1, AV_LOG_WARNING, + "XFixesGetCursorImage failed\n"); return; + } x = xcim->x - xcim->xhot; y = xcim->y - xcim->yhot; @@ -513,6 +553,16 @@ static int x11grab_read_packet(AVFormatContext *s1, AVPacket *pkt) pkt->data = image->data; pkt->size = s->frame_size; pkt->pts = curtime; + if (s->palette_changed) { + uint8_t *pal = av_packet_new_side_data(pkt, AV_PKT_DATA_PALETTE, + AVPALETTE_SIZE); + if (!pal) { + av_log(s, AV_LOG_ERROR, "Cannot append palette to packet\n"); + } else { + memcpy(pal, s->palette, AVPALETTE_SIZE); + s->palette_changed = 0; + } + } screen = DefaultScreen(dpy); root = RootWindow(dpy, screen); @@ -573,7 +623,7 @@ static int x11grab_read_packet(AVFormatContext *s1, AVPacket *pkt) } if (s->draw_mouse) - paint_mouse_pointer(image, s); + paint_mouse_pointer(image, s1); return s->frame_size; } @@ -612,13 +662,17 @@ static int x11grab_read_close(AVFormatContext *s1) #define OFFSET(x) offsetof(X11GrabContext, x) #define DEC AV_OPT_FLAG_DECODING_PARAM static const AVOption options[] = { - { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, {.str = "vga"}, 0, 0, DEC }, - { "framerate", "", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = "ntsc"}, 0, 0, DEC }, - { "draw_mouse", "Draw the mouse pointer.", OFFSET(draw_mouse), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, DEC }, - { "follow_mouse", "Move the 
grabbing region when the mouse pointer reaches within specified amount of pixels to the edge of region.", - OFFSET(follow_mouse), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT_MAX, DEC, "follow_mouse" }, - { "centered", "Keep the mouse pointer at the center of grabbing region when following.", 0, AV_OPT_TYPE_CONST, { .i64 = -1 }, INT_MIN, INT_MAX, DEC, "follow_mouse" }, - { "show_region", "Show the grabbing region.", OFFSET(show_region), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, DEC }, + { "draw_mouse", "draw the mouse pointer", OFFSET(draw_mouse), AV_OPT_TYPE_INT, {.i64 = 1}, 0, 1, DEC }, + + { "follow_mouse", "move the grabbing region when the mouse pointer reaches within specified amount of pixels to the edge of region", + OFFSET(follow_mouse), AV_OPT_TYPE_INT, {.i64 = 0}, -1, INT_MAX, DEC, "follow_mouse" }, + { "centered", "keep the mouse pointer at the center of grabbing region when following", + 0, AV_OPT_TYPE_CONST, {.i64 = -1}, INT_MIN, INT_MAX, DEC, "follow_mouse" }, + + { "framerate", "set video frame rate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, 0, DEC }, + { "show_region", "show the grabbing region", OFFSET(show_region), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 1, DEC }, + { "video_size", "set video frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = "vga"}, 0, 0, DEC }, + { "use_shm", "use MIT-SHM extension", OFFSET(use_shm), AV_OPT_TYPE_INT, {.i64 = 1}, 0, 1, DEC }, { NULL }, }; @@ -627,6 +681,7 @@ static const AVClass x11_class = { .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT, }; /** x11 grabber device demuxer declaration */ diff --git a/libavdevice/xv.c b/libavdevice/xv.c new file mode 100644 index 0000000..c19c15c --- /dev/null +++ b/libavdevice/xv.c @@ -0,0 +1,383 @@ +/* + * Copyright (c) 2013 Jeff Moguillansky + * + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * XVideo output device + * + * TODO: + * - add support to more formats + */ + +#include <X11/Xlib.h> +#include <X11/extensions/Xv.h> +#include <X11/extensions/XShm.h> +#include <X11/extensions/Xvlib.h> +#include <sys/shm.h> + +#include "libavutil/opt.h" +#include "libavutil/pixdesc.h" +#include "libavutil/imgutils.h" +#include "libavformat/internal.h" +#include "avdevice.h" + +typedef struct { + AVClass *class; + GC gc; + + Window window; + int64_t window_id; + char *window_title; + int window_width, window_height; + int window_x, window_y; + int dest_x, dest_y; /**< display area position */ + unsigned int dest_w, dest_h; /**< display area dimensions */ + + Display* display; + char *display_name; + + XvImage* yuv_image; + enum AVPixelFormat image_format; + int image_width, image_height; + XShmSegmentInfo yuv_shminfo; + int xv_port; + Atom wm_delete_message; +} XVContext; + +typedef struct XVTagFormatMap +{ + int tag; + enum AVPixelFormat format; +} XVTagFormatMap; + +static const XVTagFormatMap tag_codec_map[] = { + { MKTAG('I','4','2','0'), AV_PIX_FMT_YUV420P }, + { MKTAG('U','Y','V','Y'), AV_PIX_FMT_UYVY422 }, + { MKTAG('Y','U','Y','2'), AV_PIX_FMT_YUYV422 }, + { 0, AV_PIX_FMT_NONE } +}; + +static int 
xv_get_tag_from_format(enum AVPixelFormat format) +{ + const XVTagFormatMap *m = tag_codec_map; + int i; + for (i = 0; m->tag; m = &tag_codec_map[++i]) { + if (m->format == format) + return m->tag; + } + return 0; +} + +static int xv_write_trailer(AVFormatContext *s) +{ + XVContext *xv = s->priv_data; + if (xv->display) { + XShmDetach(xv->display, &xv->yuv_shminfo); + if (xv->yuv_image) + shmdt(xv->yuv_image->data); + XFree(xv->yuv_image); + if (xv->gc) + XFreeGC(xv->display, xv->gc); + XCloseDisplay(xv->display); + } + return 0; +} + +static int xv_write_header(AVFormatContext *s) +{ + XVContext *xv = s->priv_data; + unsigned int num_adaptors; + XvAdaptorInfo *ai; + XvImageFormatValues *fv; + XColor fgcolor; + XWindowAttributes window_attrs; + int num_formats = 0, j, tag, ret; + AVCodecContext *encctx = s->streams[0]->codec; + + if ( s->nb_streams > 1 + || encctx->codec_type != AVMEDIA_TYPE_VIDEO + || encctx->codec_id != AV_CODEC_ID_RAWVIDEO) { + av_log(s, AV_LOG_ERROR, "Only supports one rawvideo stream\n"); + return AVERROR(EINVAL); + } + + if (!(tag = xv_get_tag_from_format(encctx->pix_fmt))) { + av_log(s, AV_LOG_ERROR, + "Unsupported pixel format '%s', only yuv420p, uyvy422, yuyv422 are currently supported\n", + av_get_pix_fmt_name(encctx->pix_fmt)); + return AVERROR_PATCHWELCOME; + } + xv->image_format = encctx->pix_fmt; + + xv->display = XOpenDisplay(xv->display_name); + if (!xv->display) { + av_log(s, AV_LOG_ERROR, "Could not open the X11 display '%s'\n", xv->display_name); + return AVERROR(EINVAL); + } + + xv->image_width = encctx->width; + xv->image_height = encctx->height; + if (!xv->window_width && !xv->window_height) { + AVRational sar = encctx->sample_aspect_ratio; + xv->window_width = encctx->width; + xv->window_height = encctx->height; + if (sar.num) { + if (sar.num > sar.den) + xv->window_width = av_rescale(xv->window_width, sar.num, sar.den); + if (sar.num < sar.den) + xv->window_height = av_rescale(xv->window_height, sar.den, sar.num); + } + } + 
if (!xv->window_id) { + xv->window = XCreateSimpleWindow(xv->display, DefaultRootWindow(xv->display), + xv->window_x, xv->window_y, + xv->window_width, xv->window_height, + 0, 0, 0); + if (!xv->window_title) { + if (!(xv->window_title = av_strdup(s->filename))) { + ret = AVERROR(ENOMEM); + goto fail; + } + } + XStoreName(xv->display, xv->window, xv->window_title); + xv->wm_delete_message = XInternAtom(xv->display, "WM_DELETE_WINDOW", False); + XSetWMProtocols(xv->display, xv->window, &xv->wm_delete_message, 1); + XMapWindow(xv->display, xv->window); + } else + xv->window = xv->window_id; + + if (XvQueryAdaptors(xv->display, DefaultRootWindow(xv->display), &num_adaptors, &ai) != Success) { + ret = AVERROR_EXTERNAL; + goto fail; + } + if (!num_adaptors) { + av_log(s, AV_LOG_ERROR, "No X-Video adaptors present\n"); + return AVERROR(ENODEV); + } + xv->xv_port = ai[0].base_id; + XvFreeAdaptorInfo(ai); + + fv = XvListImageFormats(xv->display, xv->xv_port, &num_formats); + if (!fv) { + ret = AVERROR_EXTERNAL; + goto fail; + } + for (j = 0; j < num_formats; j++) { + if (fv[j].id == tag) { + break; + } + } + XFree(fv); + + if (j >= num_formats) { + av_log(s, AV_LOG_ERROR, + "Device does not support pixel format %s, aborting\n", + av_get_pix_fmt_name(encctx->pix_fmt)); + ret = AVERROR(EINVAL); + goto fail; + } + + xv->gc = XCreateGC(xv->display, xv->window, 0, 0); + xv->image_width = encctx->width; + xv->image_height = encctx->height; + xv->yuv_image = XvShmCreateImage(xv->display, xv->xv_port, tag, 0, + xv->image_width, xv->image_height, &xv->yuv_shminfo); + xv->yuv_shminfo.shmid = shmget(IPC_PRIVATE, xv->yuv_image->data_size, + IPC_CREAT | 0777); + xv->yuv_shminfo.shmaddr = (char *)shmat(xv->yuv_shminfo.shmid, 0, 0); + xv->yuv_image->data = xv->yuv_shminfo.shmaddr; + xv->yuv_shminfo.readOnly = False; + + XShmAttach(xv->display, &xv->yuv_shminfo); + XSync(xv->display, False); + shmctl(xv->yuv_shminfo.shmid, IPC_RMID, 0); + + XGetWindowAttributes(xv->display, xv->window, 
&window_attrs); + fgcolor.red = fgcolor.green = fgcolor.blue = 0; + fgcolor.flags = DoRed | DoGreen | DoBlue; + XAllocColor(xv->display, window_attrs.colormap, &fgcolor); + XSetForeground(xv->display, xv->gc, fgcolor.pixel); + //force display area recalculation at first frame + xv->window_width = xv->window_height = 0; + + return 0; + fail: + xv_write_trailer(s); + return ret; +} + +static void compute_display_area(AVFormatContext *s) +{ + XVContext *xv = s->priv_data; + AVRational sar, dar; /* sample and display aspect ratios */ + AVStream *st = s->streams[0]; + AVCodecContext *encctx = st->codec; + + /* compute overlay width and height from the codec context information */ + sar = st->sample_aspect_ratio.num ? st->sample_aspect_ratio : (AVRational){ 1, 1 }; + dar = av_mul_q(sar, (AVRational){ encctx->width, encctx->height }); + + /* we suppose the screen has a 1/1 sample aspect ratio */ + /* fit in the window */ + if (av_cmp_q(dar, (AVRational){ xv->dest_w, xv->dest_h }) > 0) { + /* fit in width */ + xv->dest_y = xv->dest_h; + xv->dest_x = 0; + xv->dest_h = av_rescale(xv->dest_w, dar.den, dar.num); + xv->dest_y -= xv->dest_h; + xv->dest_y /= 2; + } else { + /* fit in height */ + xv->dest_x = xv->dest_w; + xv->dest_y = 0; + xv->dest_w = av_rescale(xv->dest_h, dar.num, dar.den); + xv->dest_x -= xv->dest_w; + xv->dest_x /= 2; + } +} + +static int xv_repaint(AVFormatContext *s) +{ + XVContext *xv = s->priv_data; + XWindowAttributes window_attrs; + + XGetWindowAttributes(xv->display, xv->window, &window_attrs); + if (window_attrs.width != xv->window_width || window_attrs.height != xv->window_height) { + XRectangle rect[2]; + xv->dest_w = window_attrs.width; + xv->dest_h = window_attrs.height; + compute_display_area(s); + if (xv->dest_x) { + rect[0].width = rect[1].width = xv->dest_x; + rect[0].height = rect[1].height = window_attrs.height; + rect[0].y = rect[1].y = 0; + rect[0].x = 0; + rect[1].x = xv->dest_w + xv->dest_x; + XFillRectangles(xv->display, xv->window, 
xv->gc, rect, 2); + } + if (xv->dest_y) { + rect[0].width = rect[1].width = window_attrs.width; + rect[0].height = rect[1].height = xv->dest_y; + rect[0].x = rect[1].x = 0; + rect[0].y = 0; + rect[1].y = xv->dest_h + xv->dest_y; + XFillRectangles(xv->display, xv->window, xv->gc, rect, 2); + } + } + + if (XvShmPutImage(xv->display, xv->xv_port, xv->window, xv->gc, + xv->yuv_image, 0, 0, xv->image_width, xv->image_height, + xv->dest_x, xv->dest_y, xv->dest_w, xv->dest_h, True) != Success) { + av_log(s, AV_LOG_ERROR, "Could not copy image to XV shared memory buffer\n"); + return AVERROR_EXTERNAL; + } + return 0; +} + +static int write_picture(AVFormatContext *s, AVPicture *pict) +{ + XVContext *xv = s->priv_data; + XvImage *img = xv->yuv_image; + uint8_t *data[3] = { + img->data + img->offsets[0], + img->data + img->offsets[1], + img->data + img->offsets[2] + }; + + /* Check messages. Window might get closed. */ + if (!xv->window_id) { + XEvent event; + while (XPending(xv->display)) { + XNextEvent(xv->display, &event); + if (event.type == ClientMessage && event.xclient.data.l[0] == xv->wm_delete_message) { + av_log(xv, AV_LOG_DEBUG, "Window close event.\n"); + return AVERROR(EPIPE); + } + } + } + + av_image_copy(data, img->pitches, (const uint8_t **)pict->data, pict->linesize, + xv->image_format, img->width, img->height); + return xv_repaint(s); +} + +static int xv_write_packet(AVFormatContext *s, AVPacket *pkt) +{ + AVPicture pict; + AVCodecContext *ctx = s->streams[0]->codec; + + avpicture_fill(&pict, pkt->data, ctx->pix_fmt, ctx->width, ctx->height); + return write_picture(s, &pict); +} + +static int xv_write_frame(AVFormatContext *s, int stream_index, AVFrame **frame, + unsigned flags) +{ + /* xv_write_header() should have accepted only supported formats */ + if ((flags & AV_WRITE_UNCODED_FRAME_QUERY)) + return 0; + return write_picture(s, (AVPicture *)*frame); +} + +static int xv_control_message(AVFormatContext *s, int type, void *data, size_t data_size) +{ + 
switch(type) { + case AV_APP_TO_DEV_WINDOW_REPAINT: + return xv_repaint(s); + default: + break; + } + return AVERROR(ENOSYS); +} + +#define OFFSET(x) offsetof(XVContext, x) +static const AVOption options[] = { + { "display_name", "set display name", OFFSET(display_name), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_id", "set existing window id", OFFSET(window_id), AV_OPT_TYPE_INT64, {.i64 = 0 }, 0, INT64_MAX, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_size", "set window forced size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_title", "set window title", OFFSET(window_title), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_x", "set window x offset", OFFSET(window_x), AV_OPT_TYPE_INT, {.i64 = 0 }, -INT_MAX, INT_MAX, AV_OPT_FLAG_ENCODING_PARAM }, + { "window_y", "set window y offset", OFFSET(window_y), AV_OPT_TYPE_INT, {.i64 = 0 }, -INT_MAX, INT_MAX, AV_OPT_FLAG_ENCODING_PARAM }, + { NULL } + +}; + +static const AVClass xv_class = { + .class_name = "xvideo outdev", + .item_name = av_default_item_name, + .option = options, + .version = LIBAVUTIL_VERSION_INT, + .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, +}; + +AVOutputFormat ff_xv_muxer = { + .name = "xv", + .long_name = NULL_IF_CONFIG_SMALL("XV (XVideo) output device"), + .priv_data_size = sizeof(XVContext), + .audio_codec = AV_CODEC_ID_NONE, + .video_codec = AV_CODEC_ID_RAWVIDEO, + .write_header = xv_write_header, + .write_packet = xv_write_packet, + .write_uncoded_frame = xv_write_frame, + .write_trailer = xv_write_trailer, + .control_message = xv_control_message, + .flags = AVFMT_NOFILE | AVFMT_VARIABLE_FPS | AVFMT_NOTIMESTAMPS, + .priv_class = &xv_class, +}; |