/*
 * GIF decoder
 * Copyright (c) 2003 Fabrice Bellard
 * Copyright (c) 2006 Baptiste Coudurier
 * Copyright (c) 2012 Vitaliy E Sugrobov
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"
#include "lzw.h"
#include "gif.h"

/* This value is intentionally set to the "transparent white" color.
 * It is much better to have a white background instead of black
 * when a GIF image is converted to a format which does not support
 * transparency.
 */
#define GIF_TRANSPARENT_COLOR 0x00ffffff

typedef struct GifState {
    const AVClass *class;
    AVFrame *frame;
    int screen_width;
    int screen_height;
    int has_global_palette;
    int bits_per_pixel;
    uint32_t bg_color;
    int background_color_index;
    int transparent_color_index;
    int color_resolution;
    /* intermediate buffer for storing color indices
     * obtained from lzw-encoded data stream */
    uint8_t *idx_line;
    int idx_line_size;

    /* after the frame is displayed, the disposal method is used */
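    /* GIF89a disposal methods: 0 = unspecified, 1 = leave the frame in
     * place, 2 = restore the area to the background color, 3 = restore
     * the area to the previous canvas contents. */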
    int gce_prev_disposal;
    int gce_disposal;
    /* rectangle describing the area that must be disposed of */
    int gce_l, gce_t, gce_w, gce_h;
    /* depending on the disposal method we store either the part of the
     * image drawn on the canvas or the background color that
     * should be used upon disposal */
    uint32_t *stored_img;
    int stored_img_size;
    int stored_bg_color;

    GetByteContext gb;
    LZWState *lzw;

    /* auxiliary buffers */
    uint32_t global_palette[256];
    uint32_t local_palette[256];

    AVCodecContext *avctx;
    int keyframe;
    int keyframe_ok;
    int trans_color; /**< color value that is used instead of the transparent color */
} GifState;

static void gif_read_palette(GifState *s, uint32_t *pal, int nb)
{
    int i;

    for (i = 0; i < nb; i++, pal++)
        *pal = (0xffu << 24) | bytestream2_get_be24u(&s->gb);
}

static void gif_fill(AVFrame *picture, uint32_t color)
{
    uint32_t *p = (uint32_t *)picture->data[0];
    uint32_t *p_end = p + (picture->linesize[0] / sizeof(uint32_t)) * picture->height;

    for (; p < p_end; p++)
        *p = color;
}

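/* Fill a sub-rectangle of the canvas with a single ARGB color; used when
 * applying the "restore to background color" disposal method. */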
static void gif_fill_rect(AVFrame *picture, uint32_t color, int l, int t, int w, int h)
{
    const int linesize = picture->linesize[0] / sizeof(uint32_t);
    const uint32_t *py = (uint32_t *)picture->data[0] + t * linesize;
    const uint32_t *pr, *pb = py + h * linesize;
    uint32_t *px;

    for (; py < pb; py += linesize) {
        px = (uint32_t *)py + l;
        pr = px + w;

        for (; px < pr; px++)
            *px = color;
    }
}

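/* Copy a sub-rectangle between two image buffers that share the same
 * linesize; used to save and later restore the canvas area for the
 * "restore to previous" disposal method. */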
static void gif_copy_img_rect(const uint32_t *src, uint32_t *dst,
                              int linesize, int l, int t, int w, int h)
{
    const int y_start = t * linesize;
    const uint32_t *src_px,
                   *src_py = src + y_start,
                   *dst_py = dst + y_start;
    const uint32_t *src_pb = src_py + h * linesize;
    uint32_t *dst_px;

    for (; src_py < src_pb; src_py += linesize, dst_py += linesize) {
        src_px = src_py + l;
        dst_px = (uint32_t *)dst_py + l;

        memcpy(dst_px, src_px, w * sizeof(uint32_t));
    }
}

static int gif_read_image(GifState *s, AVFrame *frame)
{
    int left, top, width, height, bits_per_pixel, code_size, flags, pw;
    int is_interleaved, has_local_palette, y, pass, y1, linesize, pal_size;
    uint32_t *ptr, *pal, *px, *pr, *ptr1;
    int ret;
    uint8_t *idx;

    /* At least 9 bytes of Image Descriptor. */
    if (bytestream2_get_bytes_left(&s->gb) < 9)
        return AVERROR_INVALIDDATA;

    left   = bytestream2_get_le16u(&s->gb);
    top    = bytestream2_get_le16u(&s->gb);
    width  = bytestream2_get_le16u(&s->gb);
    height = bytestream2_get_le16u(&s->gb);
    flags  = bytestream2_get_byteu(&s->gb);
    is_interleaved    = flags & 0x40;
    has_local_palette = flags & 0x80;
    bits_per_pixel    = (flags & 0x07) + 1;

    av_dlog(s->avctx, "image x=%d y=%d w=%d h=%d\n", left, top, width, height);

    if (has_local_palette) {
        pal_size = 1 << bits_per_pixel;

        if (bytestream2_get_bytes_left(&s->gb) < pal_size * 3)
            return AVERROR_INVALIDDATA;

        gif_read_palette(s, s->local_palette, pal_size);
        pal = s->local_palette;
    } else {
        if (!s->has_global_palette) {
            av_log(s->avctx, AV_LOG_ERROR, "picture doesn't have either global or local palette.\n");
            return AVERROR_INVALIDDATA;
        }

        pal = s->global_palette;
    }

    if (s->keyframe) {
        if (s->transparent_color_index == -1 && s->has_global_palette) {
            /* transparency wasn't set before the first frame, fill with the background color */
            gif_fill(frame, s->bg_color);
        } else {
            /* otherwise fill with the transparent color.
             * this is necessary since by default the picture is filled with 0x80808080. */
            gif_fill(frame, s->trans_color);
        }
    }

    /* verify that the whole image is inside the screen dimensions */
    if (!width || width > s->screen_width || left >= s->screen_width) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid image width.\n");
        return AVERROR_INVALIDDATA;
    }
    if (!height || height > s->screen_height || top >= s->screen_height) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid image height.\n");
        return AVERROR_INVALIDDATA;
    }
    if (left + width > s->screen_width) {
        /* the full width must be kept around to avoid desync between
         * LZW decoding and output lines */
        pw = s->screen_width - left;
        av_log(s->avctx, AV_LOG_WARNING, "Image too wide by %d, truncating.\n",
               left + width - s->screen_width);
    } else {
        pw = width;
    }
    if (top + height > s->screen_height) {
        /* we don't care about the extra invisible lines */
        av_log(s->avctx, AV_LOG_WARNING, "Image too high by %d, truncating.\n",
               top + height - s->screen_height);
        height = s->screen_height - top;
    }

    /* process disposal method */
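    /* "Background" disposal refills the previous frame's rectangle with the
     * stored background (or transparent) color; "restore" disposal copies
     * the previously saved canvas area back. */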
    if (s->gce_prev_disposal == GCE_DISPOSAL_BACKGROUND) {
        gif_fill_rect(frame, s->stored_bg_color, s->gce_l, s->gce_t, s->gce_w, s->gce_h);
    } else if (s->gce_prev_disposal == GCE_DISPOSAL_RESTORE) {
        gif_copy_img_rect(s->stored_img, (uint32_t *)frame->data[0],
                          frame->linesize[0] / sizeof(uint32_t), s->gce_l, s->gce_t, s->gce_w, s->gce_h);
    }

    s->gce_prev_disposal = s->gce_disposal;

    if (s->gce_disposal != GCE_DISPOSAL_NONE) {
        s->gce_l = left; s->gce_t = top;
        s->gce_w = pw;   s->gce_h = height;

        if (s->gce_disposal == GCE_DISPOSAL_BACKGROUND) {
            if (s->transparent_color_index >= 0)
                s->stored_bg_color = s->trans_color;
            else
                s->stored_bg_color = s->bg_color;
        } else if (s->gce_disposal == GCE_DISPOSAL_RESTORE) {
            av_fast_malloc(&s->stored_img, &s->stored_img_size, frame->linesize[0] * frame->height);
            if (!s->stored_img)
                return AVERROR(ENOMEM);

            gif_copy_img_rect((uint32_t *)frame->data[0], s->stored_img,
                              frame->linesize[0] / sizeof(uint32_t), left, top, pw, height);
        }
    }

    /* Expect at least 2 bytes: 1 for lzw code size and 1 for block size. */
    if (bytestream2_get_bytes_left(&s->gb) < 2)
        return AVERROR_INVALIDDATA;

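    /* The table-based image data begins with the LZW minimum code size byte,
     * followed by a sequence of data sub-blocks. */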
    /* now get the image data */
    code_size = bytestream2_get_byteu(&s->gb);
    if ((ret = ff_lzw_decode_init(s->lzw, code_size, s->gb.buffer,
                                  bytestream2_get_bytes_left(&s->gb), FF_LZW_GIF)) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "LZW init failed\n");
        return ret;
    }

    /* read the whole image */
    linesize = frame->linesize[0] / sizeof(uint32_t);
    ptr1 = (uint32_t *)frame->data[0] + top * linesize + left;
    ptr = ptr1;
    pass = 0;
    y1 = 0;
    for (y = 0; y < height; y++) {
        int count = ff_lzw_decode(s->lzw, s->idx_line, width);
        if (count != width) {
            if (count)
                av_log(s->avctx, AV_LOG_ERROR, "LZW decode failed\n");
            goto decode_tail;
        }

        pr = ptr + pw;

        for (px = ptr, idx = s->idx_line; px < pr; px++, idx++) {
            if (*idx != s->transparent_color_index)
                *px = pal[*idx];
        }

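        /* GIF89a interlacing stores the rows in four passes: pass 1 covers
         * rows 0, 8, 16, ...; pass 2 rows 4, 12, ...; pass 3 rows 2, 6, ...;
         * pass 4 rows 1, 3, .... Passes 1 and 2 both advance by 8 rows here,
         * and "y1 = 4 >> pass" yields the starting row of the next pass. */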
        if (is_interleaved) {
            switch (pass) {
            default:
            case 0:
            case 1:
                y1  += 8;
                ptr += linesize * 8;
                break;
            case 2:
                y1  += 4;
                ptr += linesize * 4;
                break;
            case 3:
                y1  += 2;
                ptr += linesize * 2;
                break;
            }
            while (y1 >= height) {
                y1   = 4 >> pass;
                ptr  = ptr1 + linesize * y1;
                pass++;
            }
        } else {
            ptr += linesize;
        }
    }

decode_tail:
    /* consume the remaining data until the end marker is found */
    ff_lzw_decode_tail(s->lzw);

    /* The Graphic Control Extension's scope is a single frame.
     * Remove its influence. */
    s->transparent_color_index = -1;
    s->gce_disposal = GCE_DISPOSAL_NONE;

    return 0;
}

static int gif_read_extension(GifState *s)
{
    int ext_code, ext_len, gce_flags, gce_transparent_index;

    /* There must be at least 2 bytes:
     * 1 for the extension label and 1 for the extension length. */
    if (bytestream2_get_bytes_left(&s->gb) < 2)
        return AVERROR_INVALIDDATA;

    ext_code = bytestream2_get_byteu(&s->gb);
    ext_len  = bytestream2_get_byteu(&s->gb);

    av_dlog(s->avctx, "ext_code=0x%x len=%d\n", ext_code, ext_len);

    switch (ext_code) {
    case GIF_GCE_EXT_LABEL:
        if (ext_len != 4)
            goto discard_ext;

        /* We need at least 5 more bytes: 4 for the extension body
         * and 1 for the next block size. */
        if (bytestream2_get_bytes_left(&s->gb) < 5)
            return AVERROR_INVALIDDATA;

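        /* GCE packed field: bit 0 is the transparent color flag and
         * bits 2-4 are the disposal method; the two skipped bytes are the
         * frame delay in hundredths of a second. */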
        gce_flags = bytestream2_get_byteu(&s->gb);
        bytestream2_skipu(&s->gb, 2);    // delay during which the frame is shown
        gce_transparent_index = bytestream2_get_byteu(&s->gb);
        if (gce_flags & 0x01)
            s->transparent_color_index = gce_transparent_index;
        else
            s->transparent_color_index = -1;
        s->gce_disposal = (gce_flags >> 2) & 0x7;

        av_dlog(s->avctx, "gce_flags=%x tcolor=%d disposal=%d\n",
                gce_flags,
                s->transparent_color_index, s->gce_disposal);

        if (s->gce_disposal > 3) {
            av_dlog(s->avctx, "invalid value in gce_disposal (%d). Using default value of 0.\n",
                    s->gce_disposal);
            s->gce_disposal = GCE_DISPOSAL_NONE;
        }

        ext_len = bytestream2_get_byteu(&s->gb);
        break;
    }

    /* NOTE: many extension blocks can follow */
discard_ext:
    while (ext_len) {
        /* There must be at least ext_len bytes plus 1 byte for the next block size. */
        if (bytestream2_get_bytes_left(&s->gb) < ext_len + 1)
            return AVERROR_INVALIDDATA;

        bytestream2_skipu(&s->gb, ext_len);
        ext_len = bytestream2_get_byteu(&s->gb);

        av_dlog(s->avctx, "ext_len1=%d\n", ext_len);
    }
    return 0;
}

static int gif_read_header1(GifState *s)
{
    uint8_t sig[6];
    int v, n;
    int background_color_index;

    if (bytestream2_get_bytes_left(&s->gb) < 13)
        return AVERROR_INVALIDDATA;

    /* read gif signature */
    bytestream2_get_bufferu(&s->gb, sig, 6);
    if (memcmp(sig, gif87a_sig, 6) &&
        memcmp(sig, gif89a_sig, 6))
        return AVERROR_INVALIDDATA;

    /* read screen header */
    s->transparent_color_index = -1;
    s->screen_width  = bytestream2_get_le16u(&s->gb);
    s->screen_height = bytestream2_get_le16u(&s->gb);

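    /* Logical Screen Descriptor packed byte: bit 7 is the global color table
     * flag, bits 4-6 the color resolution, and bits 0-2 the size of the
     * global color table (2^(n+1) entries). */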
    v = bytestream2_get_byteu(&s->gb);
    s->color_resolution = ((v & 0x70) >> 4) + 1;
    s->has_global_palette = (v & 0x80);
    s->bits_per_pixel = (v & 0x07) + 1;
    background_color_index = bytestream2_get_byteu(&s->gb);
    n = bytestream2_get_byteu(&s->gb);
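    /* A non-zero pixel aspect ratio byte n means an aspect ratio of
     * (n + 15) / 64 per the GIF89a specification. */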
    if (n) {
        s->avctx->sample_aspect_ratio.num = n + 15;
        s->avctx->sample_aspect_ratio.den = 64;
    }

    av_dlog(s->avctx, "screen_w=%d screen_h=%d bpp=%d global_palette=%d\n",
            s->screen_width, s->screen_height, s->bits_per_pixel,
            s->has_global_palette);

    if (s->has_global_palette) {
        s->background_color_index = background_color_index;
        n = 1 << s->bits_per_pixel;
        if (bytestream2_get_bytes_left(&s->gb) < n * 3)
            return AVERROR_INVALIDDATA;

        gif_read_palette(s, s->global_palette, n);
        s->bg_color = s->global_palette[s->background_color_index];
    } else
        s->background_color_index = -1;

    return 0;
}

static int gif_parse_next_image(GifState *s, AVFrame *frame)
{
    while (bytestream2_get_bytes_left(&s->gb) > 0) {
        int code = bytestream2_get_byte(&s->gb);
        int ret;

        av_log(s->avctx, AV_LOG_DEBUG, "code=%02x '%c'\n", code, code);

        switch (code) {
        case GIF_IMAGE_SEPARATOR:
            return gif_read_image(s, frame);
        case GIF_EXTENSION_INTRODUCER:
            if ((ret = gif_read_extension(s)) < 0)
                return ret;
            break;
        case GIF_TRAILER:
            /* end of image */
            return AVERROR_EOF;
        default:
            /* erroneous block label */
            return AVERROR_INVALIDDATA;
        }
    }
    return AVERROR_EOF;
}

static av_cold int gif_decode_init(AVCodecContext *avctx)
{
    GifState *s = avctx->priv_data;

    s->avctx = avctx;

    avctx->pix_fmt = AV_PIX_FMT_RGB32;
    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);
    ff_lzw_decode_open(&s->lzw);
    return 0;
}

static int gif_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
{
    GifState *s = avctx->priv_data;
    int ret;

    bytestream2_init(&s->gb, avpkt->data, avpkt->size);

    s->frame->pts     = avpkt->pts;
    s->frame->pkt_pts = avpkt->pts;
    s->frame->pkt_dts = avpkt->dts;
    av_frame_set_pkt_duration(s->frame, avpkt->duration);

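    /* A packet that starts with a GIF signature carries the full header and
     * starts a new canvas, so it is treated as a keyframe; any other packet
     * can only be decoded after a keyframe has been seen. */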
    if (avpkt->size >= 6) {
        s->keyframe = memcmp(avpkt->data, gif87a_sig, 6) == 0 ||
                      memcmp(avpkt->data, gif89a_sig, 6) == 0;
    } else {
        s->keyframe = 0;
    }

    if (s->keyframe) {
        s->keyframe_ok = 0;
        s->gce_prev_disposal = GCE_DISPOSAL_NONE;
        if ((ret = gif_read_header1(s)) < 0)
            return ret;

        if ((ret = ff_set_dimensions(avctx, s->screen_width, s->screen_height)) < 0)
            return ret;

        av_frame_unref(s->frame);
        if ((ret = ff_get_buffer(avctx, s->frame, 0)) < 0)
            return ret;

        av_fast_malloc(&s->idx_line, &s->idx_line_size, s->screen_width);
        if (!s->idx_line)
            return AVERROR(ENOMEM);

        s->frame->pict_type = AV_PICTURE_TYPE_I;
        s->frame->key_frame = 1;
        s->keyframe_ok = 1;
    } else {
        if (!s->keyframe_ok) {
            av_log(avctx, AV_LOG_ERROR, "cannot decode frame without keyframe\n");
            return AVERROR_INVALIDDATA;
        }

        if ((ret = ff_reget_buffer(avctx, s->frame)) < 0)
            return ret;

        s->frame->pict_type = AV_PICTURE_TYPE_P;
        s->frame->key_frame = 0;
    }

    ret = gif_parse_next_image(s, s->frame);
    if (ret < 0)
        return ret;

    if ((ret = av_frame_ref(data, s->frame)) < 0)
        return ret;
    *got_frame = 1;

    return bytestream2_tell(&s->gb);
}

static av_cold int gif_decode_close(AVCodecContext *avctx)
{
    GifState *s = avctx->priv_data;

    ff_lzw_decode_close(&s->lzw);
    av_frame_free(&s->frame);
    av_freep(&s->idx_line);
    av_freep(&s->stored_img);

    return 0;
}

static const AVOption options[] = {
    { "trans_color", "color value (ARGB) that is used instead of transparent color",
      offsetof(GifState, trans_color), AV_OPT_TYPE_INT,
      {.i64 = GIF_TRANSPARENT_COLOR}, 0, 0xffffffff,
      AV_OPT_FLAG_DECODING_PARAM|AV_OPT_FLAG_VIDEO_PARAM },
    { NULL },
};

static const AVClass decoder_class = {
    .class_name = "gif decoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DECODER,
};

AVCodec ff_gif_decoder = {
    .name           = "gif",
    .long_name      = NULL_IF_CONFIG_SMALL("GIF (Graphics Interchange Format)"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_GIF,
    .priv_data_size = sizeof(GifState),
    .init           = gif_decode_init,
    .close          = gif_decode_close,
    .decode         = gif_decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .priv_class     = &decoder_class,
};