/*
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 */
26 #include "libavutil/avassert.h"
27 #include "error_resilience.h"
30 #include "msmpeg4data.h"
/* Decoder private context for the MSS2 codec.
 * NOTE(review): the member list is not visible in this chunk of the file.
 * From the usage below it must contain at least: the VC-1 decoder state,
 * `split_position`, `last_pic`, an MSS12Context `c`, the MSS2 DSP context
 * `dsp`, a qpel DSP context `qdsp` and two slice contexts `sc[2]` —
 * confirm against the full source. */
typedef struct MSS2Context {
46 static void arith2_normalise(ArithCoder
*c
)
48 while ((c
->high
>> 15) - (c
->low
>> 15) < 2) {
49 if ((c
->low
^ c
->high
) & 0x10000) {
54 c
->high
= c
->high
<< 8 & 0xFFFFFF | 0xFF;
55 c
->value
= c
->value
<< 8 & 0xFFFFFF | bytestream2_get_byte(c
->gbc
.gB
);
56 c
->low
= c
->low
<< 8 & 0xFFFFFF;
62 /* L. Stuiver and A. Moffat: "Piecewise Integer Mapping for Arithmetic Coding."
63 * In Proc. 8th Data Compression Conference (DCC '98), pp. 3-12, Mar. 1998 */
/**
 * Map a code-space value back to a symbol index (piecewise integer
 * mapping, Stuiver & Moffat DCC'98).
 *
 * @param value offset of the coder's value inside the current interval
 * @param n     number of symbols (n <= range < 2n)
 * @param range current interval width
 * @return scaled symbol value
 *
 * The first `split = 2n - range` code values map 1:1 to symbols; values
 * above `split` occupy two code values each.
 * NOTE(review): the `if (value > split) … else` skeleton was missing
 * from the corrupted source and has been restored.
 */
static int arith2_get_scaled_value(int value, int n, int range)
{
    int split = (n << 1) - range;

    if (value > split)
        return split + (value - split >> 1);
    else
        return value;
}
75 static void arith2_rescale_interval(ArithCoder
*c
, int range
,
76 int low
, int high
, int n
)
78 int split
= (n
<< 1) - range
;
81 c
->high
= split
+ (high
- split
<< 1);
85 c
->high
+= c
->low
- 1;
88 c
->low
+= split
+ (low
- split
<< 1);
93 static int arith2_get_number(ArithCoder
*c
, int n
)
95 int range
= c
->high
- c
->low
+ 1;
96 int scale
= av_log2(range
) - av_log2(n
);
99 if (n
<< scale
> range
)
104 val
= arith2_get_scaled_value(c
->value
- c
->low
, n
, range
) >> scale
;
106 arith2_rescale_interval(c
, range
, val
<< scale
, (val
+ 1) << scale
, n
);
113 static int arith2_get_prob(ArithCoder
*c
, int16_t *probs
)
115 int range
= c
->high
- c
->low
+ 1, n
= *probs
;
116 int scale
= av_log2(range
) - av_log2(n
);
119 if (n
<< scale
> range
)
124 val
= arith2_get_scaled_value(c
->value
- c
->low
, n
, range
) >> scale
;
125 while (probs
[++i
] > val
) ;
127 arith2_rescale_interval(c
, range
,
128 probs
[i
] << scale
, probs
[i
- 1] << scale
, n
);
/* Instantiates arith2_get_model_sym() from the shared macro (defined in
 * mss12.h, not visible in this chunk): decode one symbol through an
 * adaptive model using arith2_get_prob() above. */
ARITH_GET_MODEL_SYM(arith2)
135 static int arith2_get_consumed_bytes(ArithCoder
*c
)
137 int diff
= (c
->high
>> 16) - (c
->low
>> 16);
138 int bp
= bytestream2_tell(c
->gbc
.gB
) - 3 << 3;
141 while (!(diff
& 0x80)) {
146 return (bits
+ bp
+ 7 >> 3) + ((c
->low
>> 16) + 1 == c
->high
>> 16);
149 static void arith2_init(ArithCoder
*c
, GetByteContext
*gB
)
153 c
->value
= bytestream2_get_be24(gB
);
155 c
->get_model_sym
= arith2_get_model_sym
;
156 c
->get_number
= arith2_get_number
;
159 static int decode_pal_v2(MSS12Context
*ctx
, const uint8_t *buf
, int buf_size
)
162 uint32_t *pal
= ctx
->pal
+ 256 - ctx
->free_colours
;
164 if (!ctx
->free_colours
)
168 if (ncol
> ctx
->free_colours
|| buf_size
< 2 + ncol
* 3)
169 return AVERROR_INVALIDDATA
;
170 for (i
= 0; i
< ncol
; i
++)
171 *pal
++ = AV_RB24(buf
+ 3 * i
);
176 static int decode_555(GetByteContext
*gB
, uint16_t *dst
, int stride
,
177 int keyframe
, int w
, int h
)
179 int last_symbol
= 0, repeat
= 0, prev_avail
= 0;
182 int x
, y
, endx
, endy
, t
;
184 #define READ_PAIR(a, b) \
185 a = bytestream2_get_byte(gB) << 4; \
186 t = bytestream2_get_byte(gB); \
188 b = (t & 0xF) << 8; \
189 b |= bytestream2_get_byte(gB); \
194 if (endx
>= w
|| endy
>= h
|| x
> endx
|| y
> endy
)
195 return AVERROR_INVALIDDATA
;
196 dst
+= x
+ stride
* y
;
207 int b
= bytestream2_get_byte(gB
);
209 last_symbol
= b
<< 8 | bytestream2_get_byte(gB
);
213 repeat
= (repeat
<< 8) + bytestream2_get_byte(gB
) + 1;
214 if (last_symbol
== -2) {
215 int skip
= FFMIN((unsigned)repeat
, dst
+ w
- p
);
220 last_symbol
= 127 - b
;
222 if (last_symbol
>= 0)
224 else if (last_symbol
== -1 && prev_avail
)
226 } while (++p
< dst
+ w
);
234 static int decode_rle(GetBitContext
*gb
, uint8_t *pal_dst
, int pal_stride
,
235 uint8_t *rgb_dst
, int rgb_stride
, uint32_t *pal
,
236 int keyframe
, int kf_slipt
, int slice
, int w
, int h
)
238 uint8_t bits
[270] = { 0 };
242 int current_length
= 0, read_codes
= 0, next_code
= 0, current_codes
= 0;
243 int remaining_codes
, surplus_codes
, i
;
245 const int alphabet_size
= 270 - keyframe
;
247 int last_symbol
= 0, repeat
= 0, prev_avail
= 0;
250 int x
, y
, clipw
, cliph
;
252 x
= get_bits(gb
, 12);
253 y
= get_bits(gb
, 12);
254 clipw
= get_bits(gb
, 12) + 1;
255 cliph
= get_bits(gb
, 12) + 1;
257 if (x
+ clipw
> w
|| y
+ cliph
> h
)
258 return AVERROR_INVALIDDATA
;
259 pal_dst
+= pal_stride
* y
+ x
;
260 rgb_dst
+= rgb_stride
* y
+ x
* 3;
267 pal_dst
+= pal_stride
* kf_slipt
;
268 rgb_dst
+= rgb_stride
* kf_slipt
;
275 /* read explicit codes */
277 while (current_codes
--) {
278 int symbol
= get_bits(gb
, 8);
279 if (symbol
>= 204 - keyframe
)
280 symbol
+= 14 - keyframe
;
281 else if (symbol
> 189)
282 symbol
= get_bits1(gb
) + (symbol
<< 1) - 190;
284 return AVERROR_INVALIDDATA
;
285 bits
[symbol
] = current_length
;
286 codes
[symbol
] = next_code
++;
291 remaining_codes
= (1 << current_length
) - next_code
;
292 current_codes
= get_bits(gb
, av_ceil_log2(remaining_codes
+ 1));
293 if (current_length
> 22 || current_codes
> remaining_codes
)
294 return AVERROR_INVALIDDATA
;
295 } while (current_codes
!= remaining_codes
);
297 remaining_codes
= alphabet_size
- read_codes
;
299 /* determine the minimum length to fit the rest of the alphabet */
300 while ((surplus_codes
= (2 << current_length
) -
301 (next_code
<< 1) - remaining_codes
) < 0) {
306 /* add the rest of the symbols lexicographically */
307 for (i
= 0; i
< alphabet_size
; i
++)
309 if (surplus_codes
-- == 0) {
313 bits
[i
] = current_length
;
314 codes
[i
] = next_code
++;
317 if (next_code
!= 1 << current_length
)
318 return AVERROR_INVALIDDATA
;
320 if (i
= init_vlc(&vlc
, 9, alphabet_size
, bits
, 1, 1, codes
, 4, 4, 0))
325 uint8_t *pp
= pal_dst
;
326 uint8_t *rp
= rgb_dst
;
329 int b
= get_vlc2(gb
, vlc
.table
, 9, 3);
335 b
= get_bits(gb
, 4) + 10;
340 repeat
= get_bits(gb
, b
);
342 repeat
+= (1 << b
) - 1;
344 if (last_symbol
== -2) {
345 int skip
= FFMIN(repeat
, pal_dst
+ w
- pp
);
351 last_symbol
= 267 - b
;
353 if (last_symbol
>= 0) {
355 AV_WB24(rp
, pal
[last_symbol
]);
356 } else if (last_symbol
== -1 && prev_avail
) {
357 *pp
= *(pp
- pal_stride
);
358 memcpy(rp
, rp
- rgb_stride
, 3);
361 } while (++pp
< pal_dst
+ w
);
362 pal_dst
+= pal_stride
;
363 rgb_dst
+= rgb_stride
;
371 static int decode_wmv9(AVCodecContext
*avctx
, const uint8_t *buf
, int buf_size
,
372 int x
, int y
, int w
, int h
, int wmv9_mask
)
374 MSS2Context
*ctx
= avctx
->priv_data
;
375 MSS12Context
*c
= &ctx
->c
;
376 VC1Context
*v
= avctx
->priv_data
;
377 MpegEncContext
*s
= &v
->s
;
381 ff_mpeg_flush(avctx
);
383 if ((ret
= init_get_bits8(&s
->gb
, buf
, buf_size
)) < 0)
386 s
->loop_filter
= avctx
->skip_loop_filter
< AVDISCARD_ALL
;
388 if (ff_vc1_parse_frame_header(v
, &s
->gb
) < 0) {
389 av_log(v
->s
.avctx
, AV_LOG_ERROR
, "header error\n");
390 return AVERROR_INVALIDDATA
;
393 if (s
->pict_type
!= AV_PICTURE_TYPE_I
) {
394 av_log(v
->s
.avctx
, AV_LOG_ERROR
, "expected I-frame\n");
395 return AVERROR_INVALIDDATA
;
398 avctx
->pix_fmt
= AV_PIX_FMT_YUV420P
;
400 if ((ret
= ff_mpv_frame_start(s
, avctx
)) < 0) {
401 av_log(v
->s
.avctx
, AV_LOG_ERROR
, "ff_mpv_frame_start error\n");
402 avctx
->pix_fmt
= AV_PIX_FMT_RGB24
;
406 ff_mpeg_er_frame_start(s
);
408 v
->bits
= buf_size
* 8;
410 v
->end_mb_x
= (w
+ 15) >> 4;
411 s
->end_mb_y
= (h
+ 15) >> 4;
413 v
->end_mb_x
= v
->end_mb_x
+ 1 >> 1;
415 s
->end_mb_y
= s
->end_mb_y
+ 1 >> 1;
417 ff_vc1_decode_blocks(v
);
419 ff_er_frame_end(&s
->er
);
423 f
= s
->current_picture
.f
;
425 if (v
->respic
== 3) {
426 ctx
->dsp
.upsample_plane(f
->data
[0], f
->linesize
[0], w
, h
);
427 ctx
->dsp
.upsample_plane(f
->data
[1], f
->linesize
[1], w
+1 >> 1, h
+1 >> 1);
428 ctx
->dsp
.upsample_plane(f
->data
[2], f
->linesize
[2], w
+1 >> 1, h
+1 >> 1);
429 } else if (v
->respic
)
430 avpriv_request_sample(v
->s
.avctx
,
431 "Asymmetric WMV9 rectangle subsampling");
433 av_assert0(f
->linesize
[1] == f
->linesize
[2]);
436 ctx
->dsp
.mss2_blit_wmv9_masked(c
->rgb_pic
+ y
* c
->rgb_stride
+ x
* 3,
437 c
->rgb_stride
, wmv9_mask
,
438 c
->pal_pic
+ y
* c
->pal_stride
+ x
,
440 f
->data
[0], f
->linesize
[0],
441 f
->data
[1], f
->data
[2], f
->linesize
[1],
444 ctx
->dsp
.mss2_blit_wmv9(c
->rgb_pic
+ y
* c
->rgb_stride
+ x
* 3,
446 f
->data
[0], f
->linesize
[0],
447 f
->data
[1], f
->data
[2], f
->linesize
[1],
450 avctx
->pix_fmt
= AV_PIX_FMT_RGB24
;
/* A WMV9 sub-rectangle within the frame: `coded` != 0 means an embedded
 * VC-1 bitstream follows for it; otherwise it is filled gray/masked. */
typedef struct Rectangle {
    int coded, x, y, w, h;
} Rectangle;

#define MAX_WMV9_RECTANGLES 20 /* hard cap on rectangles per frame */
#define ARITH2_PADDING 2       /* extra readable bytes for the arith coder */
462 static int mss2_decode_frame(AVCodecContext
*avctx
, void *data
, int *got_frame
,
465 const uint8_t *buf
= avpkt
->data
;
466 int buf_size
= avpkt
->size
;
467 MSS2Context
*ctx
= avctx
->priv_data
;
468 MSS12Context
*c
= &ctx
->c
;
469 AVFrame
*frame
= data
;
474 int keyframe
, has_wmv9
, has_mv
, is_rle
, is_555
, ret
;
476 Rectangle wmv9rects
[MAX_WMV9_RECTANGLES
], *r
;
477 int used_rects
= 0, i
, implicit_rect
= 0, av_uninit(wmv9_mask
);
479 av_assert0(FF_INPUT_BUFFER_PADDING_SIZE
>=
480 ARITH2_PADDING
+ (MIN_CACHE_BITS
+ 7) / 8);
482 if ((ret
= init_get_bits8(&gb
, buf
, buf_size
)) < 0)
485 if (keyframe
= get_bits1(&gb
))
487 has_wmv9
= get_bits1(&gb
);
488 has_mv
= keyframe
? 0 : get_bits1(&gb
);
489 is_rle
= get_bits1(&gb
);
490 is_555
= is_rle
&& get_bits1(&gb
);
491 if (c
->slice_split
> 0)
492 ctx
->split_position
= c
->slice_split
;
493 else if (c
->slice_split
< 0) {
494 if (get_bits1(&gb
)) {
495 if (get_bits1(&gb
)) {
497 ctx
->split_position
= get_bits(&gb
, 16);
499 ctx
->split_position
= get_bits(&gb
, 12);
501 ctx
->split_position
= get_bits(&gb
, 8) << 4;
504 ctx
->split_position
= avctx
->height
/ 2;
507 ctx
->split_position
= avctx
->height
;
509 if (c
->slice_split
&& (ctx
->split_position
< 1 - is_555
||
510 ctx
->split_position
> avctx
->height
- 1))
511 return AVERROR_INVALIDDATA
;
514 buf
+= get_bits_count(&gb
) >> 3;
515 buf_size
-= get_bits_count(&gb
) >> 3;
518 return AVERROR_INVALIDDATA
;
520 if (is_555
&& (has_wmv9
|| has_mv
|| c
->slice_split
&& ctx
->split_position
))
521 return AVERROR_INVALIDDATA
;
523 avctx
->pix_fmt
= is_555
? AV_PIX_FMT_RGB555
: AV_PIX_FMT_RGB24
;
524 if (ctx
->last_pic
->format
!= avctx
->pix_fmt
)
525 av_frame_unref(ctx
->last_pic
);
528 bytestream2_init(&gB
, buf
, buf_size
+ ARITH2_PADDING
);
529 arith2_init(&acoder
, &gB
);
531 implicit_rect
= !arith2_get_bit(&acoder
);
533 while (arith2_get_bit(&acoder
)) {
534 if (used_rects
== MAX_WMV9_RECTANGLES
)
535 return AVERROR_INVALIDDATA
;
536 r
= &wmv9rects
[used_rects
];
538 r
->x
= arith2_get_number(&acoder
, avctx
->width
);
540 r
->x
= arith2_get_number(&acoder
, avctx
->width
-
541 wmv9rects
[used_rects
- 1].x
) +
542 wmv9rects
[used_rects
- 1].x
;
543 r
->y
= arith2_get_number(&acoder
, avctx
->height
);
544 r
->w
= arith2_get_number(&acoder
, avctx
->width
- r
->x
) + 1;
545 r
->h
= arith2_get_number(&acoder
, avctx
->height
- r
->y
) + 1;
549 if (implicit_rect
&& used_rects
) {
550 av_log(avctx
, AV_LOG_ERROR
, "implicit_rect && used_rects > 0\n");
551 return AVERROR_INVALIDDATA
;
557 wmv9rects
[0].w
= avctx
->width
;
558 wmv9rects
[0].h
= avctx
->height
;
562 for (i
= 0; i
< used_rects
; i
++) {
563 if (!implicit_rect
&& arith2_get_bit(&acoder
)) {
564 av_log(avctx
, AV_LOG_ERROR
, "Unexpected grandchildren\n");
565 return AVERROR_INVALIDDATA
;
568 wmv9_mask
= arith2_get_bit(&acoder
) - 1;
570 wmv9_mask
= arith2_get_number(&acoder
, 256);
572 wmv9rects
[i
].coded
= arith2_get_number(&acoder
, 2);
575 buf
+= arith2_get_consumed_bytes(&acoder
);
576 buf_size
-= arith2_get_consumed_bytes(&acoder
);
578 return AVERROR_INVALIDDATA
;
582 if (keyframe
&& !is_555
) {
583 if ((i
= decode_pal_v2(c
, buf
, buf_size
)) < 0)
584 return AVERROR_INVALIDDATA
;
591 return AVERROR_INVALIDDATA
;
592 c
->mvX
= AV_RB16(buf
- 4) - avctx
->width
;
593 c
->mvY
= AV_RB16(buf
- 2) - avctx
->height
;
596 if (c
->mvX
< 0 || c
->mvY
< 0) {
597 FFSWAP(uint8_t *, c
->pal_pic
, c
->last_pal_pic
);
599 if ((ret
= ff_get_buffer(avctx
, frame
, AV_GET_BUFFER_FLAG_REF
)) < 0)
602 if (ctx
->last_pic
->data
[0]) {
603 av_assert0(frame
->linesize
[0] == ctx
->last_pic
->linesize
[0]);
604 c
->last_rgb_pic
= ctx
->last_pic
->data
[0] +
605 ctx
->last_pic
->linesize
[0] * (avctx
->height
- 1);
607 av_log(avctx
, AV_LOG_ERROR
, "Missing keyframe\n");
608 return AVERROR_INVALIDDATA
;
611 if ((ret
= ff_reget_buffer(avctx
, ctx
->last_pic
)) < 0)
613 if ((ret
= av_frame_ref(frame
, ctx
->last_pic
)) < 0)
616 c
->last_rgb_pic
= NULL
;
618 c
->rgb_pic
= frame
->data
[0] +
619 frame
->linesize
[0] * (avctx
->height
- 1);
620 c
->rgb_stride
= -frame
->linesize
[0];
622 frame
->key_frame
= keyframe
;
623 frame
->pict_type
= keyframe
? AV_PICTURE_TYPE_I
: AV_PICTURE_TYPE_P
;
626 bytestream2_init(&gB
, buf
, buf_size
);
628 if (decode_555(&gB
, (uint16_t *)c
->rgb_pic
, c
->rgb_stride
>> 1,
629 keyframe
, avctx
->width
, avctx
->height
))
630 return AVERROR_INVALIDDATA
;
632 buf_size
-= bytestream2_tell(&gB
);
636 ff_mss12_slicecontext_reset(&ctx
->sc
[0]);
638 ff_mss12_slicecontext_reset(&ctx
->sc
[1]);
641 if ((ret
= init_get_bits8(&gb
, buf
, buf_size
)) < 0)
643 if (ret
= decode_rle(&gb
, c
->pal_pic
, c
->pal_stride
,
644 c
->rgb_pic
, c
->rgb_stride
, c
->pal
, keyframe
,
645 ctx
->split_position
, 0,
646 avctx
->width
, avctx
->height
))
651 if (ret
= decode_rle(&gb
, c
->pal_pic
, c
->pal_stride
,
652 c
->rgb_pic
, c
->rgb_stride
, c
->pal
, keyframe
,
653 ctx
->split_position
, 1,
654 avctx
->width
, avctx
->height
))
658 buf
+= get_bits_count(&gb
) >> 3;
659 buf_size
-= get_bits_count(&gb
) >> 3;
660 } else if (!implicit_rect
|| wmv9_mask
!= -1) {
662 return AVERROR_INVALIDDATA
;
663 bytestream2_init(&gB
, buf
, buf_size
+ ARITH2_PADDING
);
664 arith2_init(&acoder
, &gB
);
665 c
->keyframe
= keyframe
;
666 if (c
->corrupted
= ff_mss12_decode_rect(&ctx
->sc
[0], &acoder
, 0, 0,
668 ctx
->split_position
))
669 return AVERROR_INVALIDDATA
;
671 buf
+= arith2_get_consumed_bytes(&acoder
);
672 buf_size
-= arith2_get_consumed_bytes(&acoder
);
673 if (c
->slice_split
) {
675 return AVERROR_INVALIDDATA
;
676 bytestream2_init(&gB
, buf
, buf_size
+ ARITH2_PADDING
);
677 arith2_init(&acoder
, &gB
);
678 if (c
->corrupted
= ff_mss12_decode_rect(&ctx
->sc
[1], &acoder
, 0,
681 avctx
->height
- ctx
->split_position
))
682 return AVERROR_INVALIDDATA
;
684 buf
+= arith2_get_consumed_bytes(&acoder
);
685 buf_size
-= arith2_get_consumed_bytes(&acoder
);
688 memset(c
->pal_pic
, 0, c
->pal_stride
* avctx
->height
);
692 for (i
= 0; i
< used_rects
; i
++) {
693 int x
= wmv9rects
[i
].x
;
694 int y
= wmv9rects
[i
].y
;
695 int w
= wmv9rects
[i
].w
;
696 int h
= wmv9rects
[i
].h
;
697 if (wmv9rects
[i
].coded
) {
698 int WMV9codedFrameSize
;
699 if (buf_size
< 4 || !(WMV9codedFrameSize
= AV_RL24(buf
)))
700 return AVERROR_INVALIDDATA
;
701 if (ret
= decode_wmv9(avctx
, buf
+ 3, buf_size
- 3,
702 x
, y
, w
, h
, wmv9_mask
))
704 buf
+= WMV9codedFrameSize
+ 3;
705 buf_size
-= WMV9codedFrameSize
+ 3;
707 uint8_t *dst
= c
->rgb_pic
+ y
* c
->rgb_stride
+ x
* 3;
708 if (wmv9_mask
!= -1) {
709 ctx
->dsp
.mss2_gray_fill_masked(dst
, c
->rgb_stride
,
711 c
->pal_pic
+ y
* c
->pal_stride
+ x
,
716 memset(dst
, 0x80, w
* 3);
717 dst
+= c
->rgb_stride
;
725 av_log(avctx
, AV_LOG_WARNING
, "buffer not fully consumed\n");
727 if (c
->mvX
< 0 || c
->mvY
< 0) {
728 av_frame_unref(ctx
->last_pic
);
729 ret
= av_frame_ref(ctx
->last_pic
, frame
);
739 static av_cold
int wmv9_init(AVCodecContext
*avctx
)
741 VC1Context
*v
= avctx
->priv_data
;
746 if ((ret
= ff_vc1_init_common(v
)) < 0)
748 ff_vc1dsp_init(&v
->vc1dsp
);
750 v
->profile
= PROFILE_MAIN
;
752 v
->zz_8x4
= ff_wmv2_scantableA
;
753 v
->zz_4x8
= ff_wmv2_scantableB
;
757 v
->frmrtq_postproc
= 7;
758 v
->bitrtq_postproc
= 31;
775 v
->resync_marker
= 0;
778 v
->s
.max_b_frames
= avctx
->max_b_frames
= 0;
779 v
->quantizer_mode
= 0;
785 ff_vc1_init_transposed_scantables(v
);
787 if ((ret
= ff_msmpeg4_decode_init(avctx
)) < 0 ||
788 (ret
= ff_vc1_decode_init_alloc_tables(v
)) < 0)
791 /* error concealment */
792 v
->s
.me
.qpel_put
= v
->s
.qdsp
.put_qpel_pixels_tab
;
793 v
->s
.me
.qpel_avg
= v
->s
.qdsp
.avg_qpel_pixels_tab
;
798 static av_cold
int mss2_decode_end(AVCodecContext
*avctx
)
800 MSS2Context
*const ctx
= avctx
->priv_data
;
802 av_frame_free(&ctx
->last_pic
);
804 ff_mss12_decode_end(&ctx
->c
);
805 av_freep(&ctx
->c
.pal_pic
);
806 av_freep(&ctx
->c
.last_pal_pic
);
807 ff_vc1_decode_end(avctx
);
812 static av_cold
int mss2_decode_init(AVCodecContext
*avctx
)
814 MSS2Context
* const ctx
= avctx
->priv_data
;
815 MSS12Context
*c
= &ctx
->c
;
818 if (ret
= ff_mss12_decode_init(c
, 1, &ctx
->sc
[0], &ctx
->sc
[1]))
820 ctx
->last_pic
= av_frame_alloc();
821 c
->pal_stride
= c
->mask_stride
;
822 c
->pal_pic
= av_mallocz(c
->pal_stride
* avctx
->height
);
823 c
->last_pal_pic
= av_mallocz(c
->pal_stride
* avctx
->height
);
824 if (!c
->pal_pic
|| !c
->last_pal_pic
|| !ctx
->last_pic
) {
825 mss2_decode_end(avctx
);
826 return AVERROR(ENOMEM
);
828 if (ret
= wmv9_init(avctx
)) {
829 mss2_decode_end(avctx
);
832 ff_mss2dsp_init(&ctx
->dsp
);
833 ff_qpeldsp_init(&ctx
->qdsp
);
835 avctx
->pix_fmt
= c
->free_colours
== 127 ? AV_PIX_FMT_RGB555
842 AVCodec ff_mss2_decoder
= {
844 .long_name
= NULL_IF_CONFIG_SMALL("MS Windows Media Video V9 Screen"),
845 .type
= AVMEDIA_TYPE_VIDEO
,
846 .id
= AV_CODEC_ID_MSS2
,
847 .priv_data_size
= sizeof(MSS2Context
),
848 .init
= mss2_decode_init
,
849 .close
= mss2_decode_end
,
850 .decode
= mss2_decode_frame
,
851 .capabilities
= CODEC_CAP_DR1
,