/*
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2006 Luca Abeni
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Video4Linux2 grab interface
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and AV_PIX_FMT_*
 */

#include "v4l2-common.h"

#if CONFIG_LIBV4L2
#include <libv4l2.h>
#endif

static const int desired_video_buffers = 256;

#define V4L_ALLFORMATS  3
#define V4L_RAWFORMATS  1
#define V4L_COMPFORMATS 2

/**
 * Return timestamps to the user exactly as returned by the kernel
 */
#define V4L_TS_DEFAULT  0
/**
 * Autodetect the kind of timestamps returned by the kernel and convert to
 * absolute (wall clock) timestamps.
 */
#define V4L_TS_ABS      1
/**
 * Assume kernel timestamps are from the monotonic clock and convert to
 * absolute timestamps.
 */
#define V4L_TS_MONO2ABS 2

/**
 * Once the kind of timestamps returned by the kernel has been detected,
 * the value of the timefilter (NULL or not) determines whether a conversion
 * takes place.
 */
#define V4L_TS_CONVERT_READY V4L_TS_DEFAULT

struct video_data {
    AVClass *class;
    int fd;
    int frame_format; /* V4L2_PIX_FMT_* */
    int width, height;
    int frame_size;
    int interlaced;
    int top_field_first;
    int ts_mode;
    TimeFilter *timefilter;
    int64_t last_time_m;

    int buffers;
    volatile int buffers_queued;
    void **buf_start;
    unsigned int *buf_len;
    char *standard;
    v4l2_std_id std_id;
    int channel;
    char *pixel_format; /**< Set by a private option. */
    int list_format;    /**< Set by a private option. */
    int list_standard;  /**< Set by a private option. */
    char *framerate;    /**< Set by a private option. */

    int use_libv4l2;
    int (*open_f)(const char *file, int oflag, ...);
    int (*close_f)(int fd);
    int (*dup_f)(int fd);
    int (*ioctl_f)(int fd, unsigned long int request, ...);
    ssize_t (*read_f)(int fd, void *buffer, size_t n);
    void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset);
    int (*munmap_f)(void *_start, size_t length);
};

struct buff_data {
    struct video_data *s;
    int index;
};

static int device_open(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_capability cap;
    int fd;
    int ret;
    int flags = O_RDWR;

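    /* Select the I/O wrappers: either the libv4l2 versions (which add format
     * conversion and quirk handling) or the plain libc calls, depending on
     * the use_libv4l2 option. The v4l2_* macros below dispatch through these
     * function pointers. */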
#define SET_WRAPPERS(prefix) do {       \
    s->open_f   = prefix ## open;       \
    s->close_f  = prefix ## close;      \
    s->dup_f    = prefix ## dup;        \
    s->ioctl_f  = prefix ## ioctl;      \
    s->read_f   = prefix ## read;       \
    s->mmap_f   = prefix ## mmap;       \
    s->munmap_f = prefix ## munmap;     \
} while (0)

    if (s->use_libv4l2) {
#if CONFIG_LIBV4L2
        SET_WRAPPERS(v4l2_);
#else
        av_log(ctx, AV_LOG_ERROR, "libavdevice is not built with libv4l2 support.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        SET_WRAPPERS();
    }

#define v4l2_open   s->open_f
#define v4l2_close  s->close_f
#define v4l2_dup    s->dup_f
#define v4l2_ioctl  s->ioctl_f
#define v4l2_read   s->read_f
#define v4l2_mmap   s->mmap_f
#define v4l2_munmap s->munmap_f

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }

    fd = v4l2_open(ctx->filename, flags, 0);
    if (fd < 0) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
               ctx->filename, av_err2str(ret));
        return ret;
    }

    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               av_err2str(ret));
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
           fd, cap.capabilities);

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
        ret = AVERROR(ENODEV);
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        av_log(ctx, AV_LOG_ERROR,
               "The device does not support the streaming I/O method.\n");
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    return fd;

fail:
    v4l2_close(fd);
    return ret;
}

static int device_init(AVFormatContext *ctx, int *width, int *height,
                       uint32_t pix_fmt)
{
    struct video_data *s = ctx->priv_data;
    int fd = s->fd;
    struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
    struct v4l2_pix_format *pix = &fmt.fmt.pix;

    int res = 0;

    pix->width = *width;
    pix->height = *height;
    pix->pixelformat = pix_fmt;
    pix->field = V4L2_FIELD_ANY;

    if (v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
        res = AVERROR(errno);

    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    if (pix_fmt != fmt.fmt.pix.pixelformat) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver changed the pixel format "
               "from 0x%08X to 0x%08X\n",
               pix_fmt, fmt.fmt.pix.pixelformat);
        res = AVERROR(EINVAL);
    }

    if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver is using the interlaced mode\n");
        s->interlaced = 1;
    }

    return res;
}

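/* Guess the field order from the video standard: NTSC is delivered bottom
 * field first, other standards top field first. If the standard cannot be
 * queried, fall back to bottom field first. */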
static int first_field(const struct video_data *s, int fd)
{
    int res;
    v4l2_std_id std;

    res = v4l2_ioctl(fd, VIDIOC_G_STD, &std);
    if (res < 0) {
        return 0;
    }
    if (std & V4L2_STD_NTSC) {
        return 0;
    }

    return 1;
}

#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
static void list_framesizes(AVFormatContext *ctx, int fd, uint32_t pixelformat)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while (!v4l2_ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            av_log(ctx, AV_LOG_INFO, " %ux%u",
                   vfse.discrete.width, vfse.discrete.height);
            break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
                   vfse.stepwise.min_width,
                   vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height,
                   vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
        }
        vfse.index++;
    }
}
#endif

static void list_formats(AVFormatContext *ctx, int fd, int type)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

    while (!v4l2_ioctl(fd, VIDIOC_ENUM_FMT, &vfd)) {
        enum AVCodecID codec_id = avpriv_fmt_v4l2codec(vfd.pixelformat);
        enum AVPixelFormat pix_fmt = avpriv_fmt_v4l2ff(vfd.pixelformat, codec_id);

        vfd.index++;

        if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
            type & V4L_RAWFORMATS) {
            const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
            av_log(ctx, AV_LOG_INFO, "Raw       : %9s : %20s :",
                   fmt_name ? fmt_name : "Unsupported",
                   vfd.description);
        } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
                   type & V4L_COMPFORMATS) {
            AVCodec *codec = avcodec_find_decoder(codec_id);
            av_log(ctx, AV_LOG_INFO, "Compressed: %9s : %20s :",
                   codec ? codec->name : "Unsupported",
                   vfd.description);
        } else {
            continue;
        }

#ifdef V4L2_FMT_FLAG_EMULATED
        if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
            av_log(ctx, AV_LOG_INFO, " Emulated :");
#endif
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
        list_framesizes(ctx, fd, vfd.pixelformat);
#endif
        av_log(ctx, AV_LOG_INFO, "\n");
    }
}

static void list_standards(AVFormatContext *ctx)
{
    int ret;
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard;

    if (s->std_id == 0)
        return;

    for (standard.index = 0; ; standard.index++) {
        if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
            ret = AVERROR(errno);
            if (ret == AVERROR(EINVAL)) {
                break;
            } else {
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return;
            }
        }
        av_log(ctx, AV_LOG_INFO, "%2d, %16"PRIx64", %s\n",
               standard.index, (uint64_t)standard.id, standard.name);
    }
}

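/* Request desired_video_buffers mmap-able buffers from the driver (it may
 * grant fewer) and map each of them into our address space. */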
static int mmap_init(AVFormatContext *ctx)
{
    int i, res;
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .count  = desired_video_buffers,
        .memory = V4L2_MEMORY_MMAP
    };

    if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
        return res;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
        return AVERROR(ENOMEM);
    }
    s->buffers = req.count;
    s->buf_start = av_malloc_array(s->buffers, sizeof(void *));
    if (s->buf_start == NULL) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
        return AVERROR(ENOMEM);
    }
    s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int));
    if (s->buf_len == NULL) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_free(s->buf_start);
        return AVERROR(ENOMEM);
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };
        if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
            return res;
        }

        s->buf_len[i] = buf.length;
        if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "buf_len[%d] = %d < expected frame size %d\n",
                   i, s->buf_len[i], s->frame_size);
            return AVERROR(ENOMEM);
        }
        s->buf_start[i] = v4l2_mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED,
                               s->fd, buf.m.offset);

        if (s->buf_start[i] == MAP_FAILED) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
            return res;
        }
    }

    return 0;
}

#if FF_API_DESTRUCT_PACKET
static void dummy_release_buffer(AVPacket *pkt)
{
    av_assert0(0);
}
#endif

static void mmap_release_buffer(void *opaque, uint8_t *data)
{
    struct v4l2_buffer buf = { 0 };
    int res;
    struct buff_data *buf_descriptor = opaque;
    struct video_data *s = buf_descriptor->s;

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = buf_descriptor->index;
    av_free(buf_descriptor);

    if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
        res = AVERROR(errno);
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
               av_err2str(res));
    }

    avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
}

#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
static int64_t av_gettime_monotonic(void)
{
    return av_gettime_relative();
}
#endif

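/* Decide how to interpret the timestamps coming from the kernel: if they are
 * close to the wall clock they are treated as absolute; otherwise, if they
 * are close to the monotonic clock (or mono2abs was requested), a time filter
 * is set up to map them onto the wall clock. */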
static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
{
    struct video_data *s = ctx->priv_data;
    int64_t now;

    now = av_gettime();
    if (s->ts_mode == V4L_TS_ABS &&
        ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
        av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
        s->ts_mode = V4L_TS_CONVERT_READY;
        return 0;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (ctx->streams[0]->avg_frame_rate.num) {
        now = av_gettime_monotonic();
        if (s->ts_mode == V4L_TS_MONO2ABS ||
            (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
            AVRational tb = {AV_TIME_BASE, 1};
            int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
            av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
            /* microseconds instead of seconds, MHz instead of Hz */
            s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
            if (!s->timefilter)
                return AVERROR(ENOMEM);
            s->ts_mode = V4L_TS_CONVERT_READY;
            return 0;
        }
    }
#endif
    av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
    return AVERROR(EIO);
}

static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
{
    struct video_data *s = ctx->priv_data;

    if (s->ts_mode) {
        int r = init_convert_timestamp(ctx, *ts);
        if (r < 0)
            return r;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (s->timefilter) {
        int64_t nowa = av_gettime();
        int64_t nowm = av_gettime_monotonic();
        ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
        s->last_time_m = nowm;
        *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
    }
#endif
    return 0;
}

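/* Dequeue one filled buffer. While plenty of buffers remain queued, the
 * packet references the mapped memory directly (zero copy) and the buffer is
 * re-queued only when the packet is freed; once the number of queued buffers
 * gets low, the data is copied and the buffer is re-queued immediately. */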
static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_buffer buf = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP
    };
    int res;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
    if (res < 0) {
        if (errno == EAGAIN) {
            pkt->size = 0;
            return AVERROR(EAGAIN);
        }
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", av_err2str(res));
        return res;
    }

    if (buf.index >= s->buffers) {
        av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
        return AVERROR(EINVAL);
    }
    avpriv_atomic_int_add_and_fetch(&s->buffers_queued, -1);
    // always keep at least one buffer queued
    av_assert0(avpriv_atomic_int_get(&s->buffers_queued) >= 1);

    /* CPIA is a compressed format and we don't know the exact number of bytes
     * used by a frame, so set it here as the driver announces it.
     */
    if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
        s->frame_size = buf.bytesused;

    if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
        av_log(ctx, AV_LOG_ERROR,
               "The v4l2 frame is %d bytes, but %d bytes are expected\n",
               buf.bytesused, s->frame_size);
        return AVERROR_INVALIDDATA;
    }

    /* Image is at s->buf_start[buf.index] */
    if (avpriv_atomic_int_get(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
        /* when we start getting low on queued buffers, fall back on copying data */
        res = av_new_packet(pkt, buf.bytesused);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
            if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) == 0)
                avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
            return res;
        }
        memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);

        if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
            av_free_packet(pkt);
            return res;
        }
        avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
    } else {
        struct buff_data *buf_descriptor;

        pkt->data     = s->buf_start[buf.index];
        pkt->size     = buf.bytesused;
#if FF_API_DESTRUCT_PACKET
FF_DISABLE_DEPRECATION_WARNINGS
        pkt->destruct = dummy_release_buffer;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        buf_descriptor = av_malloc(sizeof(struct buff_data));
        if (buf_descriptor == NULL) {
            /* Something went wrong... Since av_malloc() failed, we cannot even
             * allocate a buffer to memcpy the data into.
             */
            av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
            if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) == 0)
                avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);

            return AVERROR(ENOMEM);
        }
        buf_descriptor->index = buf.index;
        buf_descriptor->s     = s;

        pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
                                    buf_descriptor, 0);
        if (!pkt->buf) {
            av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
            if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) == 0)
                avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
            av_freep(&buf_descriptor);
            return AVERROR(ENOMEM);
        }
    }
    pkt->pts = buf.timestamp.tv_sec * INT64_C(1000000) + buf.timestamp.tv_usec;
    convert_timestamp(ctx, &pkt->pts);

    return s->buf_len[buf.index];
}

static int mmap_start(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };

        if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
            return res;
        }
    }
    s->buffers_queued = s->buffers;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", av_err2str(res));
        return res;
    }

    return 0;
}

static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check the result because we could not do anything about
     * it anyway...
     */
    v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        v4l2_munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_free(s->buf_start);
    av_free(s->buf_len);
}

static int v4l2_set_parameters(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;
    struct v4l2_standard standard = { 0 };
    struct v4l2_streamparm streamparm = { 0 };
    struct v4l2_fract *tpf;
    AVRational framerate_q = { 0 };
    int i, ret;

    if (s->framerate &&
        (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
        av_log(s1, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               s->framerate);
        return ret;
    }

    if (s->standard) {
        if (s->std_id) {
            ret = 0;
            av_log(s1, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
            /* set tv standard */
            for (i = 0; ; i++) {
                standard.index = i;
                if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                    ret = AVERROR(errno);
                    break;
                }
                if (!av_strcasecmp(standard.name, s->standard))
                    break;
            }
            if (ret < 0) {
                av_log(s1, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
                return ret;
            }

            if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
                ret = AVERROR(errno);
                av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
                return ret;
            }
        } else {
            av_log(s1, AV_LOG_WARNING,
                   "This device does not support any standard\n");
        }
    }

    /* get standard */
    if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
        tpf = &standard.frameperiod;
        for (i = 0; ; i++) {
            standard.index = i;
            if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                ret = AVERROR(errno);
                if (ret == AVERROR(EINVAL)) {
                    tpf = &streamparm.parm.capture.timeperframe;
                    break;
                }
                av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return ret;
            }
            if (standard.id == s->std_id) {
                av_log(s1, AV_LOG_DEBUG,
                       "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
                       standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
                break;
            }
        }
    } else {
        tpf = &streamparm.parm.capture.timeperframe;
    }

    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
        ret = AVERROR(errno);
        av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
        return ret;
    }

    if (framerate_q.num && framerate_q.den) {
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            tpf = &streamparm.parm.capture.timeperframe;

            av_log(s1, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
                   framerate_q.den, framerate_q.num);
            tpf->numerator   = framerate_q.den;
            tpf->denominator = framerate_q.num;

            if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
                ret = AVERROR(errno);
                av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n", av_err2str(ret));
                return ret;
            }

            if (framerate_q.num != tpf->denominator ||
                framerate_q.den != tpf->numerator) {
                av_log(s1, AV_LOG_INFO,
                       "The driver changed the time per frame from "
                       "%d/%d to %d/%d\n",
                       framerate_q.den, framerate_q.num,
                       tpf->numerator, tpf->denominator);
            }
        } else {
            av_log(s1, AV_LOG_WARNING,
                   "The driver does not permit changing the time per frame\n");
        }
    }
    if (tpf->denominator > 0 && tpf->numerator > 0) {
        s1->streams[0]->avg_frame_rate.num = tpf->denominator;
        s1->streams[0]->avg_frame_rate.den = tpf->numerator;
        s1->streams[0]->r_frame_rate = s1->streams[0]->avg_frame_rate;
    } else
        av_log(s1, AV_LOG_WARNING, "Time per frame unknown\n");

    return 0;
}

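/* Negotiate a capture format: try the requested pixel format/codec first,
 * then fall back to walking the V4L2 <-> FFmpeg format conversion table
 * until the driver accepts an entry. */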
static int device_try_init(AVFormatContext *s1,
                           enum AVPixelFormat pix_fmt,
                           int *width,
                           int *height,
                           uint32_t *desired_format,
                           enum AVCodecID *codec_id)
{
    int ret, i;

    *desired_format = avpriv_fmt_ff2v4l(pix_fmt, s1->video_codec_id);

    if (*desired_format) {
        ret = device_init(s1, width, height, *desired_format);
        if (ret < 0) {
            *desired_format = 0;
            if (ret != AVERROR(EINVAL))
                return ret;
        }
    }

    if (!*desired_format) {
        for (i = 0; avpriv_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
            if (s1->video_codec_id == AV_CODEC_ID_NONE ||
                avpriv_fmt_conversion_table[i].codec_id == s1->video_codec_id) {
                av_log(s1, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
                       avcodec_get_name(avpriv_fmt_conversion_table[i].codec_id),
                       (char *)av_x_if_null(av_get_pix_fmt_name(avpriv_fmt_conversion_table[i].ff_fmt), "none"));

                *desired_format = avpriv_fmt_conversion_table[i].v4l2_fmt;
                ret = device_init(s1, width, height, *desired_format);
                if (ret >= 0)
                    break;
                else if (ret != AVERROR(EINVAL))
                    return ret;
                *desired_format = 0;
            }
        }

        if (*desired_format == 0) {
            av_log(s1, AV_LOG_ERROR, "Cannot find a proper format for "
                   "codec '%s' (id %d), pixel format '%s' (id %d)\n",
                   avcodec_get_name(s1->video_codec_id), s1->video_codec_id,
                   (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
            ret = AVERROR(EINVAL);
        }
    }

    *codec_id = avpriv_fmt_v4l2codec(*desired_format);
    av_assert0(*codec_id != AV_CODEC_ID_NONE);
    return ret;
}

static int v4l2_read_header(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;
    AVStream *st;
    int res = 0;
    uint32_t desired_format;
    enum AVCodecID codec_id = AV_CODEC_ID_NONE;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
    struct v4l2_input input = { 0 };

    st = avformat_new_stream(s1, NULL);
    if (!st)
        return AVERROR(ENOMEM);

#if CONFIG_LIBV4L2
    /* Silence libv4l2 logging. If fopen() fails, v4l2_log_file will be NULL
       and errors will get sent to stderr. */
    if (s->use_libv4l2)
        v4l2_log_file = fopen("/dev/null", "w");
#endif

    s->fd = device_open(s1);
    if (s->fd < 0)
        return s->fd;

    if (s->channel != -1) {
        /* set video input */
        av_log(s1, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
        if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
            return res;
        }
    } else {
        /* get current video input */
        if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
            return res;
        }
    }

    /* enum input */
    input.index = s->channel;
    if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
        res = AVERROR(errno);
        av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
        return res;
    }
    s->std_id = input.std;
    av_log(s1, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: %"PRIx64"\n",
           s->channel, input.name, (uint64_t)input.std);

    if (s->list_format) {
        list_formats(s1, s->fd, s->list_format);
        return AVERROR_EXIT;
    }

    if (s->list_standard) {
        list_standards(s1);
        return AVERROR_EXIT;
    }

    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    if ((res = v4l2_set_parameters(s1)) < 0)
        return res;

    if (s->pixel_format) {
        AVCodec *codec = avcodec_find_decoder_by_name(s->pixel_format);

        if (codec)
            s1->video_codec_id = codec->id;

        pix_fmt = av_get_pix_fmt(s->pixel_format);

        if (pix_fmt == AV_PIX_FMT_NONE && !codec) {
            av_log(s1, AV_LOG_ERROR, "No such input format: %s.\n",
                   s->pixel_format);

            return AVERROR(EINVAL);
        }
    }

    if (!s->width && !s->height) {
        struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

        av_log(s1, AV_LOG_VERBOSE,
               "Querying the device for the current frame size\n");
        if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
            res = AVERROR(errno);
            av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", av_err2str(res));
            return res;
        }

        s->width  = fmt.fmt.pix.width;
        s->height = fmt.fmt.pix.height;
        av_log(s1, AV_LOG_VERBOSE,
               "Setting frame size to %dx%d\n", s->width, s->height);
    }

    res = device_try_init(s1, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
    if (res < 0) {
        v4l2_close(s->fd);
        return res;
    }

    /* If no pixel_format was specified, the codec_id was not known up
     * until now. Set video_codec_id in the context, as codec_id will
     * not be available outside this function
     */
    if (codec_id != AV_CODEC_ID_NONE && s1->video_codec_id == AV_CODEC_ID_NONE)
        s1->video_codec_id = codec_id;

    if ((res = av_image_check_size(s->width, s->height, 0, s1)) < 0)
        return res;

    s->frame_format = desired_format;

    st->codec->pix_fmt = avpriv_fmt_v4l2ff(desired_format, codec_id);
    s->frame_size =
        avpicture_get_size(st->codec->pix_fmt, s->width, s->height);

    if ((res = mmap_init(s1)) ||
        (res = mmap_start(s1)) < 0) {
        v4l2_close(s->fd);
        return res;
    }

    s->top_field_first = first_field(s, s->fd);

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id = codec_id;
    if (codec_id == AV_CODEC_ID_RAWVIDEO)
        st->codec->codec_tag =
            avcodec_pix_fmt_to_codec_tag(st->codec->pix_fmt);
    else if (codec_id == AV_CODEC_ID_H264) {
        st->need_parsing = AVSTREAM_PARSE_HEADERS;
    }
    if (desired_format == V4L2_PIX_FMT_YVU420)
        st->codec->codec_tag = MKTAG('Y', 'V', '1', '2');
    else if (desired_format == V4L2_PIX_FMT_YVU410)
        st->codec->codec_tag = MKTAG('Y', 'V', 'U', '9');
    st->codec->width = s->width;
    st->codec->height = s->height;
    if (st->avg_frame_rate.den)
        st->codec->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;

    return 0;
}

static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    struct video_data *s = s1->priv_data;
    AVFrame *frame = s1->streams[0]->codec->coded_frame;
    int res;

    av_init_packet(pkt);
    if ((res = mmap_read_frame(s1, pkt)) < 0) {
        return res;
    }

    if (frame && s->interlaced) {
        frame->interlaced_frame = 1;
        frame->top_field_first = s->top_field_first;
    }

    return pkt->size;
}

static int v4l2_read_close(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;

    if (avpriv_atomic_int_get(&s->buffers_queued) != s->buffers)
        av_log(s1, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
               "close.\n");

    mmap_close(s);

    v4l2_close(s->fd);
    return 0;
}

#define OFFSET(x) offsetof(struct video_data, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
static const AVOption options[] = {
    { "standard",     "set TV standard, used only by analog frame grabber",       OFFSET(standard),     AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0,       DEC },
    { "channel",      "set TV channel, used only by frame grabber",               OFFSET(channel),      AV_OPT_TYPE_INT,    {.i64 = -1 },  -1, INT_MAX, DEC },
    { "video_size",   "set frame size",                                           OFFSET(width),        AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL},  0, 0,   DEC },
    { "pixel_format", "set preferred pixel format",                               OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},  0, 0,       DEC },
    { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},  0, 0,       DEC },
    { "framerate",    "set frame rate",                                           OFFSET(framerate),    AV_OPT_TYPE_STRING, {.str = NULL},  0, 0,       DEC },

    { "list_formats", "list available formats and exit",                          OFFSET(list_format),  AV_OPT_TYPE_INT,    {.i64 = 0 },  0, INT_MAX, DEC, "list_formats" },
    { "all",          "show all available formats",                               OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_ALLFORMATS  },    0, INT_MAX, DEC, "list_formats" },
    { "raw",          "show only non-compressed formats",                         OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_RAWFORMATS  },    0, INT_MAX, DEC, "list_formats" },
    { "compressed",   "show only compressed formats",                             OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_COMPFORMATS },    0, INT_MAX, DEC, "list_formats" },

    { "list_standards", "list supported standards and exit",                      OFFSET(list_standard), AV_OPT_TYPE_INT,   {.i64 = 0 },  0, 1, DEC, "list_standards" },
    { "all",            "show all supported standards",                           OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 },  0, 0, DEC, "list_standards" },

    { "timestamps",   "set type of timestamps for grabbed frames",                OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 }, 0, 2, DEC, "timestamps" },
    { "ts",           "set type of timestamps for grabbed frames",                OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 }, 0, 2, DEC, "timestamps" },
    { "default",      "use timestamps from the kernel",                           OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_DEFAULT  }, 0, 2, DEC, "timestamps" },
    { "abs",          "use absolute timestamps (wall clock)",                     OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_ABS      }, 0, 2, DEC, "timestamps" },
    { "mono2abs",     "force conversion from monotonic to absolute timestamps",   OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" },
    { "use_libv4l2",  "use libv4l2 (v4l-utils) conversion functions",             OFFSET(use_libv4l2),  AV_OPT_TYPE_INT,    {.i64 = 0}, 0, 1, DEC },
    { NULL },
};

static const AVClass v4l2_class = {
    .class_name = "V4L2 indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

AVInputFormat ff_v4l2_demuxer = {
    .name           = "video4linux2,v4l2",
    .long_name      = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
    .priv_data_size = sizeof(struct video_data),
    .read_header    = v4l2_read_header,
    .read_packet    = v4l2_read_packet,
    .read_close     = v4l2_read_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &v4l2_class,
};