Searched refs:current_picture (Results 26 - 50 of 76) sorted by relevance


/netgear-R7000-V1.0.7.12_1.2.5/ap/gpl/iserver/libav-0.8.8/libavcodec/
ituh263dec.c
355 mot_val = s->current_picture.f.motion_val[0][s->block_index[0]];
361 s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
368 s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
380 s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
398 s->current_picture.f.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
620 s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
653 s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
678 s->current_picture.f.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
706 int16_t *mot_val0 = s->current_picture.f.motion_val[0][2 * (s->mb_x + s->mb_y * stride)];
707 int16_t *mot_val1 = s->current_picture
[all...]
mpegvideo_common.h
258 linesize = s->current_picture.f.linesize[0] << field_based;
259 uvlinesize = s->current_picture.f.linesize[1] << field_based;
660 memcpy(mv_cache[1][1], s->current_picture.f.motion_val[0][mot_xy ], sizeof(int16_t) * 4);
661 memcpy(mv_cache[2][1], s->current_picture.f.motion_val[0][mot_xy + mot_stride], sizeof(int16_t) * 4);
662 memcpy(mv_cache[3][1], s->current_picture.f.motion_val[0][mot_xy + mot_stride], sizeof(int16_t) * 4);
664 if (mb_y == 0 || IS_INTRA(s->current_picture.f.mb_type[xy - s->mb_stride])) {
667 memcpy(mv_cache[0][1], s->current_picture.f.motion_val[0][mot_xy - mot_stride], sizeof(int16_t) * 4);
670 if (mb_x == 0 || IS_INTRA(s->current_picture.f.mb_type[xy - 1])) {
674 AV_COPY32(mv_cache[1][0], s->current_picture.f.motion_val[0][mot_xy - 1]);
675 AV_COPY32(mv_cache[2][0], s->current_picture
[all...]
mpeg4video_parser.c
77 s->current_picture_ptr = &s->current_picture;
snowenc.c
219 avctx->coded_frame= &s->current_picture;
326 const int stride= s->current_picture.linesize[0];
327 const int uvstride= s->current_picture.linesize[1];
583 const int ref_stride= s->current_picture.linesize[plane_index];
674 const int ref_stride= s->current_picture.linesize[plane_index];
675 uint8_t *dst= s->current_picture.data[plane_index];
774 const int ref_stride= s->current_picture.linesize[plane_index];
775 uint8_t *dst= s->current_picture.data[plane_index];
1120 uint8_t *dst= s->current_picture.data[0];
1121 const int stride= s->current_picture
[all...]
motion_est.c
512 s->current_picture.f.motion_val[0][mot_xy ][0] = mx;
513 s->current_picture.f.motion_val[0][mot_xy ][1] = my;
514 s->current_picture.f.motion_val[0][mot_xy + 1][0] = mx;
515 s->current_picture.f.motion_val[0][mot_xy + 1][1] = my;
518 s->current_picture.f.motion_val[0][mot_xy ][0] = mx;
519 s->current_picture.f.motion_val[0][mot_xy ][1] = my;
520 s->current_picture.f.motion_val[0][mot_xy + 1][0] = mx;
521 s->current_picture.f.motion_val[0][mot_xy + 1][1] = my;
594 P_LEFT[0] = s->current_picture.f.motion_val[0][mot_xy - 1][0];
595 P_LEFT[1] = s->current_picture
[all...]
mpeg4videodec.c
58 int8_t * const qscale_table = s->current_picture.f.qscale_table;
573 s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
579 s->current_picture.f.qscale_table[xy]= s->qscale;
595 int16_t * const mot_val = s->current_picture.f.motion_val[0][s->block_index[0]];
607 s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_GMC | MB_TYPE_L0;
611 s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
637 s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
663 s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
667 s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_GMC | MB_TYPE_L0;
676 s->current_picture
[all...]
wmv2dec.c
260 mot_val = s->current_picture.f.motion_val[0][xy];
262 A = s->current_picture.f.motion_val[0][xy - 1];
263 B = s->current_picture.f.motion_val[0][xy - wrap];
264 C = s->current_picture.f.motion_val[0][xy + 2 - wrap];
346 if (IS_SKIP(s->current_picture.f.mb_type[s->mb_y * s->mb_stride + s->mb_x])) {
pngdec.c
40 AVFrame *current_picture, *last_picture; member in struct:PNGDecContext
397 FFSWAP(AVFrame *, s->current_picture, s->last_picture);
398 avctx->coded_frame= s->current_picture;
399 p = s->current_picture;
599 uint8_t *pd = s->current_picture->data[0];
612 *picture= *s->current_picture;
631 s->current_picture = &s->picture1;
mpegvideo.c
734 s->avctx->coded_frame = (AVFrame *)&s->current_picture;
1243 ff_copy_picture(&s->current_picture, s->current_picture_ptr);
1314 s->current_picture.f.data[i] +=
1315 s->current_picture.f.linesize[i];
1317 s->current_picture.f.linesize[i] *= 2;
1363 s->current_picture.f.reference &&
1368 s->dsp.draw_edges(s->current_picture.f.data[0], s->linesize,
1372 s->dsp.draw_edges(s->current_picture.f.data[1], s->uvlinesize,
1376 s->dsp.draw_edges(s->current_picture.f.data[2], s->uvlinesize,
1390 /* copy back current_picture variable
[all...]
dxva2_mpeg2.c
41 const Picture *current_picture = s->current_picture_ptr; local
45 pp->wDecodedPictureIndex = ff_dxva2_get_surface_index(ctx, current_picture);
snowdec.c
47 int ref_stride= s->current_picture.linesize[plane_index];
48 uint8_t *dst8= s->current_picture.data[plane_index];
397 s->current_picture.pict_type= AV_PICTURE_TYPE_I; //FIXME I vs. P
437 int v= s->current_picture.data[plane_index][y*s->current_picture.linesize[plane_index] + x];
535 *picture= s->current_picture;
h264.c
267 if (ref->f.thread_opaque != s->current_picture.f.thread_opaque ||
279 if (ref->f.thread_opaque != s->current_picture.f.thread_opaque ||
296 const int mb_type = s->current_picture.f.mb_type[mb_xy];
726 const int mb_type = s->current_picture.f.mb_type[mb_xy];
1318 // s->decode = (s->flags & CODEC_FLAG_PSNR) || !s->encoding || s->current_picture.f.reference /*|| h->contains_intra*/ || 1;
1901 const int mb_type = s->current_picture.f.mb_type[mb_xy];
1913 dest_y = s->current_picture.f.data[0] + ((mb_x << pixel_shift) + mb_y * s->linesize ) * 16;
1914 dest_cb = s->current_picture.f.data[1] + (mb_x << pixel_shift)*8 + mb_y * s->uvlinesize * block_h;
1915 dest_cr = s->current_picture.f.data[2] + (mb_x << pixel_shift)*8 + mb_y * s->uvlinesize * block_h;
2110 const int mb_type = s->current_picture
[all...]
h264_cabac.c
1289 ctx += h->mb_field_decoding_flag & !!s->mb_x; //for FMO:(s->current_picture.f.mb_type[mba_xy] >> 7) & (h->slice_table[mba_xy] == h->slice_num);
1290 ctx += (s->current_picture.f.mb_type[mbb_xy] >> 7) & (h->slice_table[mbb_xy] == h->slice_num);
1335 && MB_FIELD == !!IS_INTERLACED( s->current_picture.f.mb_type[mba_xy] ) )
1341 && IS_INTERLACED( s->current_picture.f.mb_type[mbb_xy] ) )
1351 if( h->slice_table[mba_xy] == h->slice_num && !IS_SKIP( s->current_picture.f.mb_type[mba_xy] ))
1353 if( h->slice_table[mbb_xy] == h->slice_num && !IS_SKIP( s->current_picture.f.mb_type[mbb_xy] ))
1891 s->current_picture.f.mb_type[mb_xy] = MB_TYPE_SKIP;
2009 s->current_picture.f.qscale_table[mb_xy] = 0;
2012 s->current_picture.f.mb_type[mb_xy] = mb_type;
2309 s->current_picture
[all...]
mpegvideo_enc.c
178 * init s->current_picture.qscale_table from s->lambda_table
182 int8_t * const qscale_table = s->current_picture.f.qscale_table;
246 COPY(current_picture);
1424 ff_copy_picture(&s->current_picture, s->current_picture_ptr);
1466 // s->current_picture.quality, s->qscale);
1530 s->current_picture_ptr->f.error[i] = s->current_picture.f.error[i];
1928 if (s->current_picture.mc_mb_var[s->mb_stride * mb_y + mb_x] <
2322 s->current_picture.mb_var [s->mb_stride * mb_y + mb_x] = varc;
2323 s->current_picture.mb_mean[s->mb_stride * mb_y + mb_x] = (sum+128)>>8;
2383 s->current_picture
[all...]
/netgear-R7000-V1.0.7.12_1.2.5/ap/gpl/minidlna/ffmpeg-2.3.4/libavcodec/
mpegvideo_enc.c
178 * init s->current_picture.qscale_table from s->lambda_table
182 int8_t * const qscale_table = s->current_picture.qscale_table;
198 COPY(current_picture);
843 s->avctx->coded_frame = s->current_picture.f;
1532 ff_mpeg_unref_picture(s, &s->current_picture);
1533 if ((ret = ff_mpeg_ref_picture(s, &s->current_picture,
1547 s->current_picture.reference &&
1552 s->mpvencdsp.draw_edges(s->current_picture.f->data[0],
1553 s->current_picture.f->linesize[0],
1557 s->mpvencdsp.draw_edges(s->current_picture
[all...]
dxva2_vc1.c
41 const Picture *current_picture = s->current_picture_ptr; local
54 pp->wDeblockedPictureIndex = ff_dxva2_get_surface_index(ctx, current_picture->f);
258 const Picture *current_picture = v->s.current_picture_ptr; local
259 struct dxva2_picture_context *ctx_pic = current_picture->hwaccel_picture_private;
mpeg4videodec.c
63 int8_t *const qscale_table = s->current_picture.qscale_table;
624 s->current_picture.mb_type[xy] = MB_TYPE_INTRA;
630 s->current_picture.qscale_table[xy] = s->qscale;
648 int16_t *const mot_val = s->current_picture.motion_val[0][s->block_index[0]];
661 s->current_picture.mb_type[xy] = MB_TYPE_SKIP |
668 s->current_picture.mb_type[xy] = MB_TYPE_SKIP |
701 s->current_picture.mb_type[xy] = MB_TYPE_INTRA;
734 s->current_picture.mb_type[xy] = MB_TYPE_16x16 |
739 s->current_picture.mb_type[xy] = MB_TYPE_16x16 |
754 s->current_picture
[all...]
snowenc.c
238 const int stride= s->current_picture->linesize[0];
239 const int uvstride= s->current_picture->linesize[1];
503 const int ref_stride= s->current_picture->linesize[plane_index];
597 const int ref_stride= s->current_picture->linesize[plane_index];
598 uint8_t *dst= s->current_picture->data[plane_index];
700 const int ref_stride= s->current_picture->linesize[plane_index];
701 uint8_t *dst= s->current_picture->data[plane_index];
1057 uint8_t *dst= s->current_picture->data[0];
1058 const int stride= s->current_picture->linesize[0];
1504 s->m.current_picture
[all...]
msmpeg4.c
279 dest= s->current_picture.f->data[0] + (((n >> 1) + 2*s->mb_y) * bs* wrap ) + ((n & 1) + 2*s->mb_x) * bs;
282 dest= s->current_picture.f->data[n - 3] + (s->mb_y * bs * wrap) + s->mb_x * bs;
wmv2dec.c
258 mot_val = s->current_picture.motion_val[0][xy];
260 A = s->current_picture.motion_val[0][xy - 1];
261 B = s->current_picture.motion_val[0][xy - wrap];
262 C = s->current_picture.motion_val[0][xy + 2 - wrap];
343 if (IS_SKIP(s->current_picture.mb_type[s->mb_y * s->mb_stride + s->mb_x])) {
mpegvideo.c
358 s->dest[0] = s->current_picture.f->data[0] + (s->mb_y * 16 * s->linesize) + s->mb_x * 16;
359 s->dest[1] = s->current_picture.f->data[1] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
360 s->dest[2] = s->current_picture.f->data[2] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
1003 UPDATE_PICTURE(current_picture);
1326 memset(&s->current_picture, 0, sizeof(s->current_picture));
1334 s->current_picture.f = av_frame_alloc();
1335 if (!s->current_picture.f)
1543 ff_free_picture_tables(&s->current_picture);
1544 ff_mpeg_unref_picture(s, &s->current_picture);
[all...]
mpeg4video_parser.c
86 s->current_picture_ptr = &s->current_picture;
dxva2_mpeg2.c
43 const Picture *current_picture = s->current_picture_ptr; local
47 pp->wDecodedPictureIndex = ff_dxva2_get_surface_index(ctx, current_picture->f);
snowdec.c
46 int ref_stride= s->current_picture->linesize[plane_index];
47 uint8_t *dst8= s->current_picture->data[plane_index];
420 s->current_picture->pict_type= AV_PICTURE_TYPE_I; //FIXME I vs. P
472 int v= s->current_picture->data[plane_index][y*s->current_picture->linesize[plane_index] + x];
571 res = av_frame_ref(picture, s->current_picture);
diracdec.c
215 DiracFrame *current_picture; member in struct:DiracContext
1605 uint8_t *frame = s->current_picture->avframe->data[comp];
1703 picnum = s->current_picture->avframe->display_picture_number = get_bits_long(gb, 32);
1741 if (s->current_picture->avframe->reference) {
1753 while (add_frame(s->ref_frames, MAX_REFERENCE_FRAMES, s->current_picture)) {
1877 s->current_picture = pic;
1912 s->current_picture = NULL;
1950 if (!s->current_picture)
1953 if (s->current_picture->avframe->display_picture_number > s->frame_number) {
1956 s->current_picture
[all...]

Completed in 264 milliseconds
