FFmpeg
uncoded_frame.c
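This example builds a libavfilter graph from the textual description given on the command line, attaches a buffer sink to every unconnected output of the graph, and sends the resulting uncoded (raw) frames to one or more output devices with av_interleaved_write_uncoded_frame().
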
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavdevice/avdevice.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavformat/avformat.h"
#include "libavcodec/codec_id.h"

typedef struct {
    AVFormatContext *mux;
    AVStream *stream;
    AVFilterContext *sink;
} Stream;
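
/*
 * Attach a buffer sink ("buffersink" for video, "abuffersink" for audio)
 * to the unconnected output pad 'idx' of filter 'f' and remember it in
 * 'st', so that filtered frames can later be pulled from it.
 */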
static int create_sink(Stream *st, AVFilterGraph *graph,
                       AVFilterContext *f, int idx)
{
    enum AVMediaType type = avfilter_pad_get_type(f->output_pads, idx);
    const char *sink_name;
    int ret;

    switch (type) {
    case AVMEDIA_TYPE_VIDEO: sink_name = "buffersink";  break;
    case AVMEDIA_TYPE_AUDIO: sink_name = "abuffersink"; break;
    default:
        av_log(NULL, AV_LOG_ERROR, "Stream type not supported\n");
        return AVERROR(EINVAL);
    }
    ret = avfilter_graph_create_filter(&st->sink,
                                       avfilter_get_by_name(sink_name),
                                       NULL, NULL, NULL, graph);
    if (ret < 0)
        return ret;
    ret = avfilter_link(f, idx, st->sink, 0);
    if (ret < 0)
        return ret;
    return 0;
}

int main(int argc, char **argv)
{
    char *in_graph_desc, **out_dev_name;
    int nb_out_dev = 0, nb_streams = 0;
    AVFilterGraph *in_graph = NULL;
    Stream *streams = NULL, *st;
    AVFrame *frame = NULL;
    int i, j, run = 1, ret;

    //av_log_set_level(AV_LOG_DEBUG);

    if (argc < 3) {
        av_log(NULL, AV_LOG_ERROR,
               "Usage: %s filter_graph dev:out [dev2:out2...]\n\n"
               "Examples:\n"
               "%s movie=file.nut:s=v+a xv:- alsa:default\n"
               "%s movie=file.nut:s=v+a uncodedframecrc:pipe:0\n",
               argv[0], argv[0], argv[0]);
        exit(1);
    }
    in_graph_desc = argv[1];
    out_dev_name = argv + 2;
    nb_out_dev = argc - 2;

    avdevice_register_all();

    /* Create input graph */
    if (!(in_graph = avfilter_graph_alloc())) {
        ret = AVERROR(ENOMEM);
        av_log(NULL, AV_LOG_ERROR, "Unable to allocate filter graph: %s\n",
               av_err2str(ret));
        goto fail;
    }
    ret = avfilter_graph_parse_ptr(in_graph, in_graph_desc, NULL, NULL, NULL);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Unable to parse graph: %s\n",
               av_err2str(ret));
        goto fail;
    }
    /* Each unconnected output of the graph becomes one output stream */
    nb_streams = 0;
    for (i = 0; i < in_graph->nb_filters; i++) {
        AVFilterContext *f = in_graph->filters[i];
        for (j = 0; j < f->nb_inputs; j++) {
            if (!f->inputs[j]) {
                av_log(NULL, AV_LOG_ERROR, "Graph has unconnected inputs\n");
                ret = AVERROR(EINVAL);
                goto fail;
            }
        }
        for (j = 0; j < f->nb_outputs; j++)
            if (!f->outputs[j])
                nb_streams++;
    }
    if (!nb_streams) {
        av_log(NULL, AV_LOG_ERROR, "Graph has no output stream\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }
    if (nb_out_dev != 1 && nb_out_dev != nb_streams) {
        av_log(NULL, AV_LOG_ERROR,
               "Graph has %d output streams, %d devices given\n",
               nb_streams, nb_out_dev);
        ret = AVERROR(EINVAL);
        goto fail;
    }

    if (!(streams = av_calloc(nb_streams, sizeof(*streams)))) {
        ret = AVERROR(ENOMEM);
        av_log(NULL, AV_LOG_ERROR, "Could not allocate streams\n");
        goto fail; /* bail out on allocation failure */
    }
    st = streams;
    for (i = 0; i < in_graph->nb_filters; i++) {
        AVFilterContext *f = in_graph->filters[i];
        for (j = 0; j < f->nb_outputs; j++) {
            if (!f->outputs[j]) {
                if ((ret = create_sink(st++, in_graph, f, j)) < 0)
                    goto fail;
            }
        }
    }
    av_assert0(st - streams == nb_streams);
    if ((ret = avfilter_graph_config(in_graph, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Failed to configure graph\n");
        goto fail;
    }

    /* Create output devices */
    for (i = 0; i < nb_out_dev; i++) {
        char *fmt = NULL, *dev = out_dev_name[i];
        st = &streams[i];
        /* Split "device:target": the part before the first ':' selects the
           output format (device), the rest is the target name */
        if ((dev = strchr(dev, ':'))) {
            *(dev++) = 0;
            fmt = out_dev_name[i];
        }
        ret = avformat_alloc_output_context2(&st->mux, NULL, fmt, dev);
        if (ret < 0) {
            av_log(NULL, AV_LOG_ERROR, "Failed to allocate output: %s\n",
                   av_err2str(ret));
            goto fail;
        }
        if (!(st->mux->oformat->flags & AVFMT_NOFILE)) {
            ret = avio_open2(&st->mux->pb, st->mux->url, AVIO_FLAG_WRITE,
                             NULL, NULL);
            if (ret < 0) {
                av_log(st->mux, AV_LOG_ERROR, "Failed to init output: %s\n",
                       av_err2str(ret));
                goto fail;
            }
        }
    }
    /* With a single output device, all streams share it */
    for (; i < nb_streams; i++)
        streams[i].mux = streams[0].mux;

    /* Create output device streams */
    for (i = 0; i < nb_streams; i++) {
        st = &streams[i];
        if (!(st->stream = avformat_new_stream(st->mux, NULL))) {
            ret = AVERROR(ENOMEM);
            av_log(NULL, AV_LOG_ERROR, "Failed to create output stream\n");
            goto fail;
        }
        st->stream->codecpar->codec_type = av_buffersink_get_type(st->sink);
        st->stream->time_base = av_buffersink_get_time_base(st->sink);
        switch (av_buffersink_get_type(st->sink)) {
        case AVMEDIA_TYPE_VIDEO:
            st->stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
            st->stream->avg_frame_rate =
            st->stream->r_frame_rate   = av_buffersink_get_frame_rate(st->sink);
            st->stream->codecpar->width               = av_buffersink_get_w(st->sink);
            st->stream->codecpar->height              = av_buffersink_get_h(st->sink);
            st->stream->codecpar->sample_aspect_ratio = av_buffersink_get_sample_aspect_ratio(st->sink);
            st->stream->codecpar->format              = av_buffersink_get_format(st->sink);
            break;
        case AVMEDIA_TYPE_AUDIO:
            ret = av_buffersink_get_ch_layout(st->sink, &st->stream->codecpar->ch_layout);
            if (ret < 0)
                goto fail;
            st->stream->codecpar->sample_rate = av_buffersink_get_sample_rate(st->sink);
            st->stream->codecpar->format      = av_buffersink_get_format(st->sink);
            st->stream->codecpar->codec_id    = av_get_pcm_codec(st->stream->codecpar->format, -1);
            break;
        default:
            av_assert0(!"reached");
        }
    }

    /* Init output devices */
    for (i = 0; i < nb_out_dev; i++) {
        st = &streams[i];
        if ((ret = avformat_write_header(st->mux, NULL)) < 0) {
            av_log(st->mux, AV_LOG_ERROR, "Failed to init output: %s\n",
                   av_err2str(ret));
            goto fail;
        }
    }

    /* Check output devices */
    for (i = 0; i < nb_streams; i++) {
        st = &streams[i];
        ret = av_write_uncoded_frame_query(st->mux, st->stream->index);
        if (ret < 0) {
            av_log(st->mux, AV_LOG_ERROR,
                   "Uncoded frames not supported on stream #%d: %s\n",
                   i, av_err2str(ret));
            goto fail;
        }
    }
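
    /*
     * Streaming loop: avfilter_graph_request_oldest() drives the graph by
     * requesting a frame on its oldest sink link; each sink is then drained
     * with AV_BUFFERSINK_FLAG_NO_REQUEST and every frame is written, uncoded,
     * to its output device.
     */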
    while (run) {
        ret = avfilter_graph_request_oldest(in_graph);
        if (ret < 0) {
            if (ret == AVERROR_EOF) {
                run = 0;
            } else {
                av_log(NULL, AV_LOG_ERROR, "Error filtering: %s\n",
                       av_err2str(ret));
                break;
            }
        }
        for (i = 0; i < nb_streams; i++) {
            st = &streams[i];
            while (1) {
                if (!frame && !(frame = av_frame_alloc())) {
                    ret = AVERROR(ENOMEM);
                    av_log(NULL, AV_LOG_ERROR, "Could not allocate frame\n");
                    goto fail;
                }
                ret = av_buffersink_get_frame_flags(st->sink, frame,
                                                    AV_BUFFERSINK_FLAG_NO_REQUEST);
                if (ret < 0) {
                    if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
                        av_log(NULL, AV_LOG_WARNING, "Error in sink: %s\n",
                               av_err2str(ret));
                    break;
                }
                /* Rescale the timestamp from the sink time base to the
                   output stream time base before muxing */
                if (frame->pts != AV_NOPTS_VALUE)
                    frame->pts = av_rescale_q(frame->pts,
                                              av_buffersink_get_time_base(st->sink),
                                              st->stream->time_base);
                ret = av_interleaved_write_uncoded_frame(st->mux,
                                                         st->stream->index,
                                                         frame);
                frame = NULL; /* ownership of the frame passes to the muxer */
                if (ret < 0) {
                    av_log(st->mux, AV_LOG_ERROR,
                           "Error writing frame: %s\n", av_err2str(ret));
                    goto fail;
                }
            }
        }
    }
    ret = 0;

    for (i = 0; i < nb_out_dev; i++) {
        st = &streams[i];
        av_write_trailer(st->mux);
    }

fail:
    av_frame_free(&frame);
    avfilter_graph_free(&in_graph);
    if (streams) {
        for (i = 0; i < nb_out_dev; i++) {
            st = &streams[i];
            if (st->mux) {
                if (st->mux->pb)
                    avio_closep(&st->mux->pb);
                avformat_free_context(st->mux);
            }
        }
    }
    av_freep(&streams);
    return ret < 0;
}
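
As the built-in usage message shows, the program takes a filter-graph description followed by one "device:target" pair per output stream, or a single pair shared by all streams. Assuming the example binary is named uncoded_frame, running `uncoded_frame movie=file.nut:s=v+a xv:- alsa:default` displays the video through the xv (XVideo) output device and plays the audio on ALSA's default device.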