FFmpeg — libavformat/rtpdec_jpeg.c (extracted from the generated documentation page for this file)
1 /*
2  * RTP JPEG-compressed Video Depacketizer, RFC 2435
3  * Copyright (c) 2012 Samuel Pitoiset
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include "avformat.h"
23 #include "avio_internal.h"
24 #include "rtpdec.h"
25 #include "rtpdec_formats.h"
26 #include "libavutil/intreadwrite.h"
27 #include "libavcodec/jpegtables.h"
28 #include "libavcodec/mjpeg.h"
29 #include "libavcodec/bytestream.h"
30 
31 /**
32  * RTP/JPEG specific private data.
33  */
34 struct PayloadContext {
35  AVIOContext *frame; ///< current frame buffer
36  uint32_t timestamp; ///< current frame timestamp
37  int hdr_size; ///< size of the current frame header
38  uint8_t qtables[128][128];
40 };
41 
/* Default quantizers from RFC 2435: 64 luma coefficients followed by
 * 64 chroma coefficients, both in zig-zag order. */
static const uint8_t default_quantizers[128] = {
    /* luma table */
     16,  11,  12,  14,  12,  10,  16,  14,
     13,  14,  18,  17,  16,  19,  24,  40,
     26,  24,  22,  22,  24,  49,  35,  37,
     29,  40,  58,  51,  61,  60,  57,  51,
     56,  55,  64,  72,  92,  78,  64,  68,
     87,  69,  55,  56,  80, 109,  81,  87,
     95,  98, 103, 104, 103,  62,  77, 113,
    121, 112, 100, 120,  92, 101, 103,  99,

    /* chroma table */
     17,  18,  18,  24,  21,  24,  47,  26,
     26,  47,  99,  66,  56,  66,  99,  99,
     99,  99,  99,  99,  99,  99,  99,  99,
     99,  99,  99,  99,  99,  99,  99,  99,
     99,  99,  99,  99,  99,  99,  99,  99,
     99,  99,  99,  99,  99,  99,  99,  99,
     99,  99,  99,  99,  99,  99,  99,  99,
     99,  99,  99,  99,  99,  99,  99,  99
};
63 
65 {
66  ffio_free_dyn_buf(&jpeg->frame);
67 }
68 
69 static int jpeg_create_huffman_table(PutByteContext *p, int table_class,
70  int table_id, const uint8_t *bits_table,
71  const uint8_t *value_table)
72 {
73  int i, n = 0;
74 
75  bytestream2_put_byte(p, table_class << 4 | table_id);
76 
77  for (i = 1; i <= 16; i++) {
78  n += bits_table[i];
79  bytestream2_put_byte(p, bits_table[i]);
80  }
81 
82  for (i = 0; i < n; i++) {
83  bytestream2_put_byte(p, value_table[i]);
84  }
85  return n + 17;
86 }
87 
88 static void jpeg_put_marker(PutByteContext *pbc, int code)
89 {
90  bytestream2_put_byte(pbc, 0xff);
91  bytestream2_put_byte(pbc, code);
92 }
93 
94 static int jpeg_create_header(uint8_t *buf, int size, uint32_t type, uint32_t w,
95  uint32_t h, const uint8_t *qtable, int nb_qtable,
96  int dri)
97 {
98  PutByteContext pbc;
99  uint8_t *dht_size_ptr;
100  int dht_size, i;
101 
102  bytestream2_init_writer(&pbc, buf, size);
103 
104  /* Convert from blocks to pixels. */
105  w <<= 3;
106  h <<= 3;
107 
108  /* SOI */
109  jpeg_put_marker(&pbc, SOI);
110 
111  /* JFIF header */
112  jpeg_put_marker(&pbc, APP0);
113  bytestream2_put_be16(&pbc, 16);
114  bytestream2_put_buffer(&pbc, "JFIF", 5);
115  bytestream2_put_be16(&pbc, 0x0201);
116  bytestream2_put_byte(&pbc, 0);
117  bytestream2_put_be16(&pbc, 1);
118  bytestream2_put_be16(&pbc, 1);
119  bytestream2_put_byte(&pbc, 0);
120  bytestream2_put_byte(&pbc, 0);
121 
122  if (dri) {
123  jpeg_put_marker(&pbc, DRI);
124  bytestream2_put_be16(&pbc, 4);
125  bytestream2_put_be16(&pbc, dri);
126  }
127 
128  /* DQT */
129  jpeg_put_marker(&pbc, DQT);
130  bytestream2_put_be16(&pbc, 2 + nb_qtable * (1 + 64));
131 
132  for (i = 0; i < nb_qtable; i++) {
133  bytestream2_put_byte(&pbc, i);
134 
135  /* Each table is an array of 64 values given in zig-zag
136  * order, identical to the format used in a JFIF DQT
137  * marker segment. */
138  bytestream2_put_buffer(&pbc, qtable + 64 * i, 64);
139  }
140 
141  /* DHT */
142  jpeg_put_marker(&pbc, DHT);
143  dht_size_ptr = pbc.buffer;
144  bytestream2_put_be16(&pbc, 0);
145 
146  dht_size = 2;
155  AV_WB16(dht_size_ptr, dht_size);
156 
157  /* SOF0 */
158  jpeg_put_marker(&pbc, SOF0);
159  bytestream2_put_be16(&pbc, 17); /* size */
160  bytestream2_put_byte(&pbc, 8); /* bits per component */
161  bytestream2_put_be16(&pbc, h);
162  bytestream2_put_be16(&pbc, w);
163  bytestream2_put_byte(&pbc, 3); /* number of components */
164  bytestream2_put_byte(&pbc, 1); /* component number */
165  bytestream2_put_byte(&pbc, (2 << 4) | (type ? 2 : 1)); /* hsample/vsample */
166  bytestream2_put_byte(&pbc, 0); /* matrix number */
167  bytestream2_put_byte(&pbc, 2); /* component number */
168  bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
169  bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */
170  bytestream2_put_byte(&pbc, 3); /* component number */
171  bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
172  bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */
173 
174  /* SOS */
175  jpeg_put_marker(&pbc, SOS);
176  bytestream2_put_be16(&pbc, 12);
177  bytestream2_put_byte(&pbc, 3);
178  bytestream2_put_byte(&pbc, 1);
179  bytestream2_put_byte(&pbc, 0);
180  bytestream2_put_byte(&pbc, 2);
181  bytestream2_put_byte(&pbc, 17);
182  bytestream2_put_byte(&pbc, 3);
183  bytestream2_put_byte(&pbc, 17);
184  bytestream2_put_byte(&pbc, 0);
185  bytestream2_put_byte(&pbc, 63);
186  bytestream2_put_byte(&pbc, 0);
187 
188  /* Return the length in bytes of the JPEG header. */
189  return bytestream2_tell_p(&pbc);
190 }
191 
193 {
194  int factor = q;
195  int i;
196  uint16_t S;
197 
198  factor = av_clip(q, 1, 99);
199 
200  if (q < 50)
201  S = 5000 / factor;
202  else
203  S = 200 - factor * 2;
204 
205  for (i = 0; i < 128; i++) {
206  int val = (default_quantizers[i] * S + 50) / 100;
207 
208  /* Limit the quantizers to 1 <= q <= 255. */
209  val = av_clip(val, 1, 255);
210  qtables[i] = val;
211  }
212 }
213 
215  AVStream *st, AVPacket *pkt, uint32_t *timestamp,
216  const uint8_t *buf, int len, uint16_t seq,
217  int flags)
218 {
219  uint8_t type, q, width, height;
220  const uint8_t *qtables = NULL;
221  uint16_t qtable_len;
222  uint32_t off;
223  int ret, dri = 0;
224 
225  if (len < 8) {
226  av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
227  return AVERROR_INVALIDDATA;
228  }
229 
230  /* Parse the main JPEG header. */
231  off = AV_RB24(buf + 1); /* fragment byte offset */
232  type = AV_RB8(buf + 4); /* id of jpeg decoder params */
233  q = AV_RB8(buf + 5); /* quantization factor (or table id) */
234  width = AV_RB8(buf + 6); /* frame width in 8 pixel blocks */
235  height = AV_RB8(buf + 7); /* frame height in 8 pixel blocks */
236  buf += 8;
237  len -= 8;
238 
239  if (type & 0x40) {
240  if (len < 4) {
241  av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
242  return AVERROR_INVALIDDATA;
243  }
244  dri = AV_RB16(buf);
245  buf += 4;
246  len -= 4;
247  type &= ~0x40;
248  }
249  if (type > 1) {
250  avpriv_report_missing_feature(ctx, "RTP/JPEG type %"PRIu8, type);
251  return AVERROR_PATCHWELCOME;
252  }
253 
254  /* Parse the quantization table header. */
255  if (off == 0) {
256  /* Start of JPEG data packet. */
257  uint8_t new_qtables[128];
258  uint8_t hdr[1024];
259 
260  if (q > 127) {
261  uint8_t precision;
262  if (len < 4) {
263  av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
264  return AVERROR_INVALIDDATA;
265  }
266 
267  /* The first byte is reserved for future use. */
268  precision = AV_RB8(buf + 1); /* size of coefficients */
269  qtable_len = AV_RB16(buf + 2); /* length in bytes */
270  buf += 4;
271  len -= 4;
272 
273  if (precision)
274  av_log(ctx, AV_LOG_WARNING, "Only 8-bit precision is supported.\n");
275 
276  if (qtable_len > 0) {
277  if (len < qtable_len) {
278  av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
279  return AVERROR_INVALIDDATA;
280  }
281  qtables = buf;
282  buf += qtable_len;
283  len -= qtable_len;
284  if (q < 255) {
285  if (jpeg->qtables_len[q - 128] &&
286  (jpeg->qtables_len[q - 128] != qtable_len ||
287  memcmp(qtables, &jpeg->qtables[q - 128][0], qtable_len))) {
288  av_log(ctx, AV_LOG_WARNING,
289  "Quantization tables for q=%d changed\n", q);
290  } else if (!jpeg->qtables_len[q - 128] && qtable_len <= 128) {
291  memcpy(&jpeg->qtables[q - 128][0], qtables,
292  qtable_len);
293  jpeg->qtables_len[q - 128] = qtable_len;
294  }
295  }
296  } else {
297  if (q == 255) {
298  av_log(ctx, AV_LOG_ERROR,
299  "Invalid RTP/JPEG packet. Quantization tables not found.\n");
300  return AVERROR_INVALIDDATA;
301  }
302  if (!jpeg->qtables_len[q - 128]) {
303  av_log(ctx, AV_LOG_ERROR,
304  "No quantization tables known for q=%d yet.\n", q);
305  return AVERROR_INVALIDDATA;
306  }
307  qtables = &jpeg->qtables[q - 128][0];
308  qtable_len = jpeg->qtables_len[q - 128];
309  }
310  } else { /* q <= 127 */
311  if (q == 0 || q > 99) {
312  av_log(ctx, AV_LOG_ERROR, "Reserved q value %d\n", q);
313  return AVERROR_INVALIDDATA;
314  }
315  create_default_qtables(new_qtables, q);
316  qtables = new_qtables;
317  qtable_len = sizeof(new_qtables);
318  }
319 
320  /* Skip the current frame in case of the end packet
321  * has been lost somewhere. */
322  ffio_free_dyn_buf(&jpeg->frame);
323 
324  if ((ret = avio_open_dyn_buf(&jpeg->frame)) < 0)
325  return ret;
326  jpeg->timestamp = *timestamp;
327 
328  /* Generate a frame and scan headers that can be prepended to the
329  * RTP/JPEG data payload to produce a JPEG compressed image in
330  * interchange format. */
331  jpeg->hdr_size = jpeg_create_header(hdr, sizeof(hdr), type, width,
332  height, qtables,
333  qtable_len / 64, dri);
334 
335  /* Copy JPEG header to frame buffer. */
336  avio_write(jpeg->frame, hdr, jpeg->hdr_size);
337  }
338 
339  if (!jpeg->frame) {
340  av_log(ctx, AV_LOG_ERROR,
341  "Received packet without a start chunk; dropping frame.\n");
342  return AVERROR(EAGAIN);
343  }
344 
345  if (jpeg->timestamp != *timestamp) {
346  /* Skip the current frame if timestamp is incorrect.
347  * A start packet has been lost somewhere. */
348  ffio_free_dyn_buf(&jpeg->frame);
349  av_log(ctx, AV_LOG_ERROR, "RTP timestamps don't match.\n");
350  return AVERROR_INVALIDDATA;
351  }
352 
353  if (off != avio_tell(jpeg->frame) - jpeg->hdr_size) {
354  av_log(ctx, AV_LOG_ERROR,
355  "Missing packets; dropping frame.\n");
356  return AVERROR(EAGAIN);
357  }
358 
359  /* Copy data to frame buffer. */
360  avio_write(jpeg->frame, buf, len);
361 
362  if (flags & RTP_FLAG_MARKER) {
363  /* End of JPEG data packet. */
364  uint8_t buf[2] = { 0xff, EOI };
365 
366  /* Put EOI marker. */
367  avio_write(jpeg->frame, buf, sizeof(buf));
368 
369  /* Prepare the JPEG packet. */
370  if ((ret = ff_rtp_finalize_packet(pkt, &jpeg->frame, st->index)) < 0) {
371  av_log(ctx, AV_LOG_ERROR,
372  "Error occurred when getting frame buffer.\n");
373  return ret;
374  }
375 
376  return 0;
377  }
378 
379  return AVERROR(EAGAIN);
380 }
381 
383  .enc_name = "JPEG",
384  .codec_type = AVMEDIA_TYPE_VIDEO,
385  .codec_id = AV_CODEC_ID_MJPEG,
386  .priv_data_size = sizeof(PayloadContext),
387  .close = jpeg_close_context,
389  .static_payload_id = 26,
390 };
AVPacket pkt
Definition: rtpdec_qt.c:37
#define NULL
Definition: coverity.c:32
const char const char void * val
Definition: avisynth_c.h:863
Bytestream IO Context.
Definition: avio.h:161
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:59
Definition: mjpeg.h:71
Definition: mjpeg.h:73
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:182
RTP/JPEG specific private data.
Definition: rdt.c:83
int index
stream index in AVFormatContext
Definition: avformat.h:882
static av_always_inline void bytestream2_init_writer(PutByteContext *p, uint8_t *buf, int buf_size)
Definition: bytestream.h:143
GLint GLenum type
Definition: opengl_enc.c:104
Definition: mjpeg.h:75
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_WL16 uint64_t_TMPL AV_WB64 unsigned int_TMPL AV_WB32 unsigned int_TMPL AV_WB24 unsigned int_TMPL AV_RB16
Definition: bytestream.h:87
MJPEG encoder and decoder.
int avio_open_dyn_buf(AVIOContext **s)
Open a write only memory stream.
Definition: aviobuf.c:1430
AVIOContext * frame
current frame buffer
Definition: rtpdec_jpeg.c:35
Format I/O context.
Definition: avformat.h:1358
Definition: mjpeg.h:72
uint8_t
static int jpeg_create_huffman_table(PutByteContext *p, int table_class, int table_id, const uint8_t *bits_table, const uint8_t *value_table)
Definition: rtpdec_jpeg.c:69
static void jpeg_put_marker(PutByteContext *pbc, int code)
Definition: rtpdec_jpeg.c:88
const uint8_t avpriv_mjpeg_bits_dc_luminance[17]
Definition: jpegtables.c:65
const RTPDynamicProtocolHandler ff_jpeg_dynamic_handler
Definition: rtpdec_jpeg.c:382
ptrdiff_t size
Definition: opengl_enc.c:100
static av_always_inline int64_t avio_tell(AVIOContext *s)
ftell() equivalent for AVIOContext.
Definition: avio.h:557
void avio_write(AVIOContext *s, const unsigned char *buf, int size)
Definition: aviobuf.c:218
#define AV_WB16(p, v)
Definition: intreadwrite.h:405
#define av_log(a,...)
uint32_t timestamp
current frame timestamp
Definition: rtpdec_ac3.c:31
static int jpeg_create_header(uint8_t *buf, int size, uint32_t type, uint32_t w, uint32_t h, const uint8_t *qtable, int nb_qtable, int dri)
Definition: rtpdec_jpeg.c:94
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:259
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
#define RTP_FLAG_MARKER
RTP marker bit was set for this packet.
Definition: rtpdec.h:93
#define S(s, c, i)
uint8_t qtables[128][128]
Definition: rtpdec_jpeg.c:38
const uint8_t avpriv_mjpeg_bits_dc_chrominance[17]
Definition: jpegtables.c:70
static void jpeg_close_context(PayloadContext *jpeg)
Definition: rtpdec_jpeg.c:64
static av_always_inline int bytestream2_tell_p(PutByteContext *p)
Definition: bytestream.h:193
Definition: mjpeg.h:39
Definition: mjpeg.h:70
Definition: mjpeg.h:79
Definition: mjpeg.h:56
uint8_t w
Definition: llviddspenc.c:38
AVFormatContext * ctx
Definition: movenc.c:48
int n
Definition: avisynth_c.h:760
static av_always_inline unsigned int bytestream2_put_buffer(PutByteContext *p, const uint8_t *src, unsigned int size)
Definition: bytestream.h:282
static void create_default_qtables(uint8_t *qtables, uint8_t q)
Definition: rtpdec_jpeg.c:192
void ffio_free_dyn_buf(AVIOContext **s)
Free a dynamic buffer.
Definition: aviobuf.c:1489
static int jpeg_parse_packet(AVFormatContext *ctx, PayloadContext *jpeg, AVStream *st, AVPacket *pkt, uint32_t *timestamp, const uint8_t *buf, int len, uint16_t seq, int flags)
Definition: rtpdec_jpeg.c:214
Stream structure.
Definition: avformat.h:881
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:62
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_WL16 uint64_t_TMPL AV_WB64 unsigned int_TMPL AV_WB32 unsigned int_TMPL AV_WB24 unsigned int_TMPL AV_WB16 unsigned int_TMPL AV_RB8
Definition: bytestream.h:87
uint8_t * buffer
Definition: bytestream.h:38
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_WL16 uint64_t_TMPL AV_WB64 unsigned int_TMPL AV_WB32 unsigned int_TMPL AV_RB24
Definition: bytestream.h:87
const uint8_t avpriv_mjpeg_val_dc[12]
Definition: jpegtables.c:67
uint8_t qtables_len[128]
Definition: rtpdec_jpeg.c:39
uint8_t * buf
the temporary storage buffer
Definition: rtpdec_asf.c:183
static const int factor[16]
Definition: vf_pp7.c:75
const uint8_t avpriv_mjpeg_bits_ac_chrominance[17]
Definition: jpegtables.c:99
const char * enc_name
Definition: rtpdec.h:116
const uint8_t avpriv_mjpeg_val_ac_chrominance[]
Definition: jpegtables.c:102
void avpriv_report_missing_feature(void *avc, const char *msg,...) av_printf_format(2
Log a generic warning message about a missing feature.
#define flags(name, subs,...)
Definition: cbs_av1.c:561
and forward the test the status of outputs and forward it to the corresponding return FFERROR_NOT_READY If the filters stores internally one or a few frame for some it can consider them to be part of the FIFO and delay acknowledging a status change accordingly Example code
Main libavformat public API header.
static const uint8_t default_quantizers[128]
Definition: rtpdec_jpeg.c:42
int ff_rtp_finalize_packet(AVPacket *pkt, AVIOContext **dyn_buf, int stream_idx)
Close the dynamic buffer and make a packet from it.
Definition: rtpdec.c:927
const uint8_t avpriv_mjpeg_bits_ac_luminance[17]
Definition: jpegtables.c:73
static int parse_packet(AVFormatContext *s, AVPacket *pkt, int stream_index)
Parse a packet, add all split parts to parse_queue.
Definition: utils.c:1437
const uint8_t avpriv_mjpeg_val_ac_luminance[]
Definition: jpegtables.c:75
int hdr_size
size of the current frame header
Definition: rtpdec_jpeg.c:37
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
This structure stores compressed data.
Definition: avcodec.h:1457