Searched refs:current_picture (Results 1 - 25 of 76) sorted by relevance


/netgear-R7000-V1.0.7.12_1.2.5/ap/gpl/minidlna/ffmpeg-2.3.4/libavcodec/
h263.c
52 s->current_picture.mbskip_table[mb_xy] = s->mb_skipped;
71 s->current_picture.ref_index[0][4*mb_xy ] =
72 s->current_picture.ref_index[0][4*mb_xy + 1] = s->field_select[0][0];
73 s->current_picture.ref_index[0][4*mb_xy + 2] =
74 s->current_picture.ref_index[0][4*mb_xy + 3] = s->field_select[0][1];
78 s->current_picture.motion_val[0][xy][0] = motion_x;
79 s->current_picture.motion_val[0][xy][1] = motion_y;
80 s->current_picture.motion_val[0][xy + 1][0] = motion_x;
81 s->current_picture.motion_val[0][xy + 1][1] = motion_y;
82 s->current_picture
[all...]
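
Editor's note: the h263.c hits above record per-macroblock bookkeeping into side tables attached to the picture being decoded: a skip flag (mbskip_table), a reference index per 8x8 block (ref_index), and a motion vector per 8x8 block (motion_val), with a single 16x16 vector duplicated into all four block slots. Below is a minimal sketch of that layout using simplified stand-in types; ToyPicture and its strides are illustrative, not FFmpeg's real Picture/MpegEncContext.

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-ins for the per-picture side tables; the real
     * FFmpeg Picture keeps equivalent arrays (mbskip_table, ref_index,
     * motion_val) sized from the macroblock grid. */
    typedef struct ToyPicture {
        int      mb_stride;          /* macroblocks per row             */
        int      b_stride;           /* 8x8 blocks per row              */
        uint8_t *mbskip_table;       /* one byte per macroblock         */
        int8_t  *ref_index;          /* four entries (8x8 blocks) per MB */
        int16_t (*motion_val)[2];    /* one (x, y) pair per 8x8 block   */
    } ToyPicture;

    /* Record a single 16x16 forward vector for macroblock (mb_x, mb_y):
     * the same (mx, my) goes into all four 8x8 block slots, the same way
     * the h263.c excerpt duplicates motion_x/motion_y. */
    static void store_mb_motion(ToyPicture *pic, int mb_x, int mb_y,
                                int mx, int my, int ref, int skipped)
    {
        const int mb_xy = mb_x + mb_y * pic->mb_stride;
        const int b_xy  = 2 * mb_x + 2 * mb_y * pic->b_stride;

        pic->mbskip_table[mb_xy] = (uint8_t)skipped;
        for (int i = 0; i < 4; i++)
            pic->ref_index[4 * mb_xy + i] = (int8_t)ref;

        for (int dy = 0; dy < 2; dy++)
            for (int dx = 0; dx < 2; dx++) {
                const int xy = b_xy + dx + dy * pic->b_stride;
                pic->motion_val[xy][0] = (int16_t)mx;
                pic->motion_val[xy][1] = (int16_t)my;
            }
    }

    int main(void)
    {
        enum { MB_W = 4, MB_H = 3 };               /* 4x3 macroblock grid */
        uint8_t  mbskip[MB_W * MB_H] = { 0 };
        int8_t   ref_index[4 * MB_W * MB_H] = { 0 };
        int16_t  motion_val[(2 * MB_W) * (2 * MB_H)][2] = { { 0 } };
        ToyPicture pic = { MB_W, 2 * MB_W, mbskip, ref_index, motion_val };

        store_mb_motion(&pic, 1, 2, /*mx=*/5, /*my=*/-3, /*ref=*/0, /*skipped=*/0);
        printf("MV of first block of MB(1,2): (%d, %d)\n",
               motion_val[2 * 1 + 2 * 2 * (2 * MB_W)][0],
               motion_val[2 * 1 + 2 * 2 * (2 * MB_W)][1]);
        return 0;
    }
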
svq1enc.h
42 AVFrame *current_picture; member in struct:SVQ1EncContext
vc1dec.c
355 s->current_picture.motion_val[1][s->block_index[i] + v->blocks_off][0] = mx;
356 s->current_picture.motion_val[1][s->block_index[i] + v->blocks_off][1] = my;
378 srcY = s->current_picture.f->data[0];
379 srcU = s->current_picture.f->data[1];
380 srcV = s->current_picture.f->data[2];
572 srcY = s->current_picture.f->data[0];
626 s->current_picture.motion_val[1][s->block_index[0] + v->blocks_off][0] = tx;
627 s->current_picture.motion_val[1][s->block_index[0] + v->blocks_off][1] = ty;
637 s->current_picture.motion_val[1][s->block_index[n] + v->blocks_off][0] = mx;
638 s->current_picture
[all...]
svq1enc.c
269 s->m.current_picture_ptr = &s->m.current_picture;
275 s->m.current_picture.f->linesize[0] = stride;
309 s->m.current_picture.mb_mean = (uint8_t *)s->dummy;
310 s->m.current_picture.mb_var = (uint16_t *)s->dummy;
311 s->m.current_picture.mc_mb_var = (uint16_t *)s->dummy;
312 s->m.current_picture.mb_type = s->dummy;
314 s->m.current_picture.motion_val[0] = s->motion_val8[plane] + 2;
506 av_frame_free(&s->current_picture);
523 s->current_picture = av_frame_alloc();
525 if (!avctx->coded_frame || !s->current_picture || !
[all...]
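
Editor's note: these svq1enc.c hits show the encoder owning current_picture as a heap-allocated frame: av_frame_alloc() at init, a NULL check alongside the other allocations, av_frame_free() at close. A standalone sketch of that ownership pattern against the public libavutil API (link against libavutil; error handling reduced to the minimum):

    #include <stdio.h>
    #include <libavutil/frame.h>

    int main(void)
    {
        /* Allocate the frame shell; buffers would be attached later,
         * e.g. by the encoder or av_frame_get_buffer(). */
        AVFrame *current_picture = av_frame_alloc();
        if (!current_picture) {
            fprintf(stderr, "out of memory\n");
            return 1;
        }

        /* ... use the frame as the codec's current picture ... */

        /* av_frame_free() takes AVFrame** and NULLs the pointer,
         * matching the av_frame_free(&s->current_picture) call above. */
        av_frame_free(&current_picture);
        return 0;
    }
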
dxva2_h264.c
48 const H264Picture *current_picture = h->cur_pic_ptr; local
54 ff_dxva2_get_surface_index(ctx, &current_picture->f),
126 current_picture->field_poc[0] != INT_MAX)
127 pp->CurrFieldOrderCnt[0] = current_picture->field_poc[0];
130 current_picture->field_poc[1] != INT_MAX)
131 pp->CurrFieldOrderCnt[1] = current_picture->field_poc[1];
297 const H264Picture *current_picture = h->cur_pic_ptr; local
298 struct dxva2_picture_context *ctx_pic = current_picture->hwaccel_picture_private;
417 const H264Picture *current_picture = h->cur_pic_ptr; local
418 struct dxva2_picture_context *ctx_pic = current_picture
[all...]
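
Editor's note: the dxva2_h264.c hits copy the current picture's top/bottom field picture order counts into the DXVA2 picture parameters, but only when the field is actually coded; INT_MAX serves as the "not set" sentinel. A small sketch of just that guard, with simplified stand-in structs (ToyH264Picture and ToyDxvaPicParams model only the two fields used here):

    #include <limits.h>
    #include <stdio.h>

    /* Stand-ins: the real code fills DXVA_PicParams_H264 from an
     * H264Picture; only the fields touched in the excerpt are modeled. */
    typedef struct ToyH264Picture   { int field_poc[2]; } ToyH264Picture;
    typedef struct ToyDxvaPicParams { int CurrFieldOrderCnt[2]; } ToyDxvaPicParams;

    /* Copy the top/bottom field picture order counts, treating INT_MAX
     * as "field not present", as in the dxva2_h264.c excerpt. */
    static void fill_field_order_cnt(ToyDxvaPicParams *pp,
                                     const ToyH264Picture *current_picture)
    {
        pp->CurrFieldOrderCnt[0] = 0;
        pp->CurrFieldOrderCnt[1] = 0;
        if (current_picture->field_poc[0] != INT_MAX)
            pp->CurrFieldOrderCnt[0] = current_picture->field_poc[0];
        if (current_picture->field_poc[1] != INT_MAX)
            pp->CurrFieldOrderCnt[1] = current_picture->field_poc[1];
    }

    int main(void)
    {
        ToyH264Picture cur = { { 42, INT_MAX } };   /* only the top field coded */
        ToyDxvaPicParams pp;
        fill_field_order_cnt(&pp, &cur);
        printf("%d %d\n", pp.CurrFieldOrderCnt[0], pp.CurrFieldOrderCnt[1]);
        return 0;
    }
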
h261dec.c
216 s->current_picture.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
425 s->current_picture.mb_type[xy] = MB_TYPE_INTRA;
432 s->current_picture.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
436 if (s->current_picture.motion_val[0]) {
439 s->current_picture.motion_val[0][b_xy][0] = s->mv[0][0][0];
440 s->current_picture.motion_val[0][b_xy][1] = s->mv[0][0][1];
621 s->current_picture.f->pict_type = s->pict_type;
622 s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
645 av_assert0(s->current_picture.f->pict_type == s->current_picture_ptr->f->pict_type);
646 av_assert0(s->current_picture
[all...]
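
Editor's note: h261dec.c classifies each macroblock by OR-ing MB_TYPE_* bit flags into current_picture.mb_type[xy] (skip, intra, 16x16 inter with list-0 prediction, and so on). A toy illustration of that flag scheme; the TOY_MB_TYPE_* values are invented for the example and do not match FFmpeg's real MB_TYPE_* constants.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical flag values for illustration only. */
    #define TOY_MB_TYPE_INTRA  (1u << 0)
    #define TOY_MB_TYPE_16x16  (1u << 1)
    #define TOY_MB_TYPE_8x8    (1u << 2)
    #define TOY_MB_TYPE_L0     (1u << 3)
    #define TOY_MB_TYPE_SKIP   (1u << 4)

    int main(void)
    {
        /* One entry per macroblock, as in current_picture.mb_type[xy]. */
        uint32_t mb_type[4] = { 0 };

        mb_type[0] = TOY_MB_TYPE_INTRA;                                     /* intra MB        */
        mb_type[1] = TOY_MB_TYPE_16x16 | TOY_MB_TYPE_L0;                    /* inter, one MV   */
        mb_type[2] = TOY_MB_TYPE_SKIP | TOY_MB_TYPE_16x16 | TOY_MB_TYPE_L0; /* skipped         */

        for (int xy = 0; xy < 4; xy++)
            printf("mb %d: %s\n", xy,
                   (mb_type[xy] & TOY_MB_TYPE_INTRA) ? "intra" :
                   (mb_type[xy] & TOY_MB_TYPE_SKIP)  ? "skip"  : "inter/empty");
        return 0;
    }
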
mpegvideo_xvmc.c
45 struct xvmc_pix_fmt *render = (struct xvmc_pix_fmt*)s->current_picture.f->data[2];
90 struct xvmc_pix_fmt *last, *next, *render = (struct xvmc_pix_fmt*)s->current_picture.f->data[2];
159 struct xvmc_pix_fmt *render = (struct xvmc_pix_fmt*)s->current_picture.f->data[2];
198 s->current_picture.qscale_table[mb_xy] = s->qscale;
201 render = (struct xvmc_pix_fmt*)s->current_picture.f->data[2];
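
Editor's note: in the XvMC path, current_picture.f->data[2] does not point at chroma pixels. The hardware pixel format stores a pointer to an accelerator-specific render structure there, and mpegvideo_xvmc.c casts it back to struct xvmc_pix_fmt *. A sketch of that convention, with a stand-in ToyRenderSurface instead of the real xvmc_pix_fmt:

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the accelerator-specific surface description that
     * XvMC pixel formats carry; the real struct xvmc_pix_fmt
     * (libavutil/xvmc.h) has many more fields. */
    typedef struct ToyRenderSurface {
        unsigned surface_id;
        int      picture_structure;
    } ToyRenderSurface;

    /* Stand-in frame: for hardware formats, data[] holds opaque
     * per-API pointers rather than pixel planes. */
    typedef struct ToyFrame {
        uint8_t *data[4];
    } ToyFrame;

    int main(void)
    {
        ToyRenderSurface surf = { .surface_id = 7, .picture_structure = 3 };
        ToyFrame frame = { { 0 } };

        /* The decoder stores the surface pointer in data[2] ... */
        frame.data[2] = (uint8_t *)&surf;

        /* ... and the XvMC glue later casts it back, exactly like
         * (struct xvmc_pix_fmt *)s->current_picture.f->data[2]. */
        ToyRenderSurface *render = (ToyRenderSurface *)frame.data[2];
        printf("rendering surface %u\n", render->surface_id);
        return 0;
    }
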
snow.c
498 s->current_picture = av_frame_alloc();
499 if (!s->mconly_picture || !s->current_picture)
635 av_free(s->halfpel_plane[s->max_ref_frames-1][1+i/3][i%3] - EDGE_WIDTH*(1+s->current_picture->linesize[i%3]));
645 if (s->current_picture->data[0] && !(s->avctx->flags&CODEC_FLAG_EMU_EDGE)) {
646 s->mpvencdsp.draw_edges(s->current_picture->data[0],
647 s->current_picture->linesize[0], w , h ,
649 if (s->current_picture->data[2]) {
650 s->mpvencdsp.draw_edges(s->current_picture->data[1],
651 s->current_picture->linesize[1], w>>s->chroma_h_shift, h>>s->chroma_v_shift,
653 s->mpvencdsp.draw_edges(s->current_picture
[all...]
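
Editor's note: snow.c pads the decoded picture with mpvencdsp.draw_edges() so motion compensation may read a little outside the visible area, calling it per plane with the chroma dimensions shifted down (w>>chroma_h_shift, h>>chroma_v_shift). Below is a plain-C sketch of what such edge replication does for one plane; it is an illustrative reimplementation, not FFmpeg's optimized routine.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Replicate the outermost pixels of one plane into an `edge`-pixel
     * margin around it. */
    static void replicate_edges(uint8_t *plane, int linesize,
                                int w, int h, int edge)
    {
        /* left/right margins: repeat the first/last pixel of each row */
        for (int y = 0; y < h; y++) {
            uint8_t *row = plane + y * linesize;
            memset(row - edge, row[0], edge);
            memset(row + w, row[w - 1], edge);
        }
        /* top/bottom margins: repeat the (already padded) first/last rows */
        for (int y = 1; y <= edge; y++) {
            memcpy(plane - edge - y * linesize, plane - edge, w + 2 * edge);
            memcpy(plane - edge + (h - 1 + y) * linesize,
                   plane - edge + (h - 1) * linesize, w + 2 * edge);
        }
    }

    int main(void)
    {
        enum { W = 8, H = 4, EDGE = 2 };
        const int linesize = W + 2 * EDGE;
        static uint8_t buf[(H + 2 * EDGE) * (W + 2 * EDGE)];
        uint8_t *plane = buf + EDGE * linesize + EDGE; /* top-left visible pixel */

        for (int y = 0; y < H; y++)
            for (int x = 0; x < W; x++)
                plane[y * linesize + x] = (uint8_t)(16 * y + x);

        replicate_edges(plane, linesize, W, H, EDGE);
        printf("pixel left of row 0: %d (same as plane[0]: %d)\n",
               plane[-1], plane[0]);
        return 0;
    }
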
motion_est.c
504 s->current_picture.motion_val[0][mot_xy ][0] = mx;
505 s->current_picture.motion_val[0][mot_xy ][1] = my;
506 s->current_picture.motion_val[0][mot_xy + 1][0] = mx;
507 s->current_picture.motion_val[0][mot_xy + 1][1] = my;
510 s->current_picture.motion_val[0][mot_xy ][0] = mx;
511 s->current_picture.motion_val[0][mot_xy ][1] = my;
512 s->current_picture.motion_val[0][mot_xy + 1][0] = mx;
513 s->current_picture.motion_val[0][mot_xy + 1][1] = my;
595 P_LEFT[0] = s->current_picture.motion_val[0][mot_xy - 1][0];
596 P_LEFT[1] = s->current_picture
[all...]
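
Editor's note: motion_est.c writes the chosen (mx, my) into the 8x8 slots of current_picture.motion_val covered by the macroblock and, further down, reads neighbouring entries (P_LEFT here) back as predictors. A sketch of the H.263-family median predictor built from left/top/top-right neighbours; the helper names and the omitted border handling are simplifications, not the exact motion_est.c logic.

    #include <stdint.h>
    #include <stdio.h>

    /* Median of three, similar in spirit to FFmpeg's mid_pred() helper. */
    static int median3(int a, int b, int c)
    {
        if (a > b) { int t = a; a = b; b = t; }
        if (b > c) b = c;
        return a > b ? a : b;
    }

    /* Predict the motion vector of the 8x8 block at (bx, by) from its
     * left, top and top-right neighbours in a motion_val-style table
     * (one int16_t[2] entry per block, b_stride entries per row). */
    static void predict_mv(int16_t (*motion_val)[2], int b_stride,
                           int bx, int by, int *pred_x, int *pred_y)
    {
        const int xy = bx + by * b_stride;
        const int16_t *left     = motion_val[xy - 1];
        const int16_t *top      = motion_val[xy - b_stride];
        const int16_t *topright = motion_val[xy - b_stride + 1];

        *pred_x = median3(left[0], top[0], topright[0]);
        *pred_y = median3(left[1], top[1], topright[1]);
    }

    int main(void)
    {
        /* 4x4 block grid; only the neighbours of block (1,1) are set. */
        int16_t motion_val[16][2] = { { 0 } };
        motion_val[4][0] = 3;  motion_val[4][1] = -1;   /* left      */
        motion_val[1][0] = 5;  motion_val[1][1] =  2;   /* top       */
        motion_val[2][0] = 1;  motion_val[2][1] =  4;   /* top-right */

        int px, py;
        predict_mv(motion_val, 4, 1, 1, &px, &py);
        printf("predictor = (%d, %d)\n", px, py);       /* (3, 2) */
        return 0;
    }
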
intrax8.c
307 s->current_picture.f->linesize[chroma>0],
616 s->dest[chroma], s->current_picture.f->linesize[!!chroma]);
640 dsp_x8_put_solidcolor(w->predicted_dc, s->dest[chroma], s->current_picture.f->linesize[!!chroma]);
644 s->current_picture.f->linesize[!!chroma] );
648 s->current_picture.f->linesize[!!chroma],
659 int linesize = s->current_picture.f->linesize[!!chroma];
674 const int linesize = s->current_picture.f->linesize[0];
675 const int uvlinesize = s->current_picture.f->linesize[1];
677 s->dest[0] = s->current_picture.f->data[0];
678 s->dest[1] = s->current_picture
[all...]
ituh263dec.c
332 mot_val = s->current_picture.motion_val[0][s->block_index[0]];
338 s->current_picture.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
345 s->current_picture.mb_type[xy] = MB_TYPE_INTRA;
357 s->current_picture.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
375 s->current_picture.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
600 s->current_picture.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
633 s->current_picture.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
658 s->current_picture.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
686 int16_t *mot_val0 = s->current_picture.motion_val[0][2 * (s->mb_x + s->mb_y * stride)];
687 int16_t *mot_val1 = s->current_picture
[all...]
/netgear-R7000-V1.0.7.12_1.2.5/ap/gpl/iserver/libav-0.8.8/libavcodec/
h263.c
55 s->current_picture.f.mbskip_table[mb_xy] = s->mb_skipped;
74 s->current_picture.f.ref_index[0][4*mb_xy ] =
75 s->current_picture.f.ref_index[0][4*mb_xy + 1] = s->field_select[0][0];
76 s->current_picture.f.ref_index[0][4*mb_xy + 2] =
77 s->current_picture.f.ref_index[0][4*mb_xy + 3] = s->field_select[0][1];
81 s->current_picture.f.motion_val[0][xy][0] = motion_x;
82 s->current_picture.f.motion_val[0][xy][1] = motion_y;
83 s->current_picture.f.motion_val[0][xy + 1][0] = motion_x;
84 s->current_picture.f.motion_val[0][xy + 1][1] = motion_y;
85 s->current_picture
[all...]
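
Editor's note: comparing the two trees in this result list, the libav-0.8.8 Picture embeds its AVFrame, so code reads s->current_picture.f.data / .f.motion_val, while in ffmpeg-2.3.4 the frame is held by pointer (s->current_picture.f->data) and per-MB tables such as motion_val sit on the Picture itself. A stand-in sketch of the two layouts; ToyFrame, PictureOld and PictureNew are illustrative types, not the real structs.

    #include <stdint.h>

    /* Stand-in "frame" with just the fields the excerpts touch. */
    typedef struct ToyFrame {
        uint8_t *data[3];
        int      linesize[3];
    } ToyFrame;

    /* Older layout (libav-0.8.8 hits): the picture embeds the frame,
     * so accesses read current_picture.f.data[...]. */
    typedef struct PictureOld {
        ToyFrame f;
    } PictureOld;

    /* Newer layout (ffmpeg-2.3.4 hits): the picture holds a pointer to
     * a refcounted frame, so accesses read current_picture.f->data[...],
     * and per-MB tables such as motion_val live on the picture. */
    typedef struct PictureNew {
        ToyFrame *f;
        int16_t (*motion_val[2])[2];
    } PictureNew;

    static uint8_t *luma_old(PictureOld *p) { return p->f.data[0];  }
    static uint8_t *luma_new(PictureNew *p) { return p->f->data[0]; }

    int main(void)
    {
        static uint8_t plane[16];
        ToyFrame   frame = { { plane, 0, 0 }, { 16, 0, 0 } };
        PictureOld po    = { frame };
        PictureNew pn    = { &frame, { 0, 0 } };
        return luma_old(&po) == luma_new(&pn) ? 0 : 1;
    }
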
error_resilience.c
45 s->dest[0] = s->current_picture.f.data[0] + (s->mb_y * 16 * s->linesize) + s->mb_x * 16;
46 s->dest[1] = s->current_picture.f.data[1] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
47 s->dest[2] = s->current_picture.f.data[2] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
60 fill_rectangle(&s->current_picture.f.ref_index[0][4 * h->mb_xy],
183 if (IS_INTER(s->current_picture.f.mb_type[mb_index]))
192 int intra_j = IS_INTRA(s->current_picture.f.mb_type[mb_index_j]);
204 int intra_j = IS_INTRA(s->current_picture.f.mb_type[mb_index_j]);
216 int intra_j = IS_INTRA(s->current_picture.f.mb_type[mb_index_j]);
229 int intra_j = IS_INTRA(s->current_picture.f.mb_type[mb_index_j]);
269 int left_intra = IS_INTRA(s->current_picture
[all...]
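
Editor's note: error_resilience.c rebuilds the per-plane destination pointers for macroblock (mb_x, mb_y): 16-pixel steps in luma, shrunk by the chroma subsampling shifts in the two chroma planes. A sketch of that address arithmetic with stand-in parameters instead of the real MpegEncContext fields:

    #include <stdint.h>
    #include <stdio.h>

    /* Compute the top-left destination pointers of macroblock (mb_x, mb_y)
     * in the three planes, as the error_resilience.c excerpt does. */
    static void mb_dest(uint8_t *dest[3], uint8_t *data[3],
                        int linesize, int uvlinesize,
                        int chroma_x_shift, int chroma_y_shift,
                        int mb_x, int mb_y)
    {
        dest[0] = data[0] + mb_y * 16 * linesize + mb_x * 16;
        dest[1] = data[1] + mb_y * (16 >> chroma_y_shift) * uvlinesize
                          + mb_x * (16 >> chroma_x_shift);
        dest[2] = data[2] + mb_y * (16 >> chroma_y_shift) * uvlinesize
                          + mb_x * (16 >> chroma_x_shift);
    }

    int main(void)
    {
        /* 64x48 4:2:0 frame: luma stride 64, chroma stride 32. */
        static uint8_t y[64 * 48], u[32 * 24], v[32 * 24];
        uint8_t *data[3] = { y, u, v }, *dest[3];

        mb_dest(dest, data, 64, 32, 1, 1, /*mb_x=*/2, /*mb_y=*/1);
        printf("luma offset %td, chroma offset %td\n",
               dest[0] - y, dest[1] - u);   /* 1056 and 272 */
        return 0;
    }
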
vc1dec.c
350 s->current_picture.f.motion_val[1][s->block_index[0] + v->blocks_off][0] = mx;
351 s->current_picture.f.motion_val[1][s->block_index[0] + v->blocks_off][1] = my;
373 srcY = s->current_picture.f.data[0];
374 srcU = s->current_picture.f.data[1];
375 srcV = s->current_picture.f.data[2];
565 srcY = s->current_picture.f.data[0];
606 s->current_picture.f.motion_val[1][s->block_index[0] + v->blocks_off][0] = tx;
607 s->current_picture.f.motion_val[1][s->block_index[0] + v->blocks_off][1] = ty;
799 s->current_picture.f.motion_val[1][s->block_index[0] + v->blocks_off][0] = 0;
800 s->current_picture
[all...]
snow.c
565 av_free(s->halfpel_plane[s->max_ref_frames-1][1+i/3][i%3] - EDGE_WIDTH*(1+s->current_picture.linesize[i%3]));
574 if (s->current_picture.data[0] && !(s->avctx->flags&CODEC_FLAG_EMU_EDGE)) {
575 s->dsp.draw_edges(s->current_picture.data[0],
576 s->current_picture.linesize[0], w , h ,
578 s->dsp.draw_edges(s->current_picture.data[1],
579 s->current_picture.linesize[1], w>>1, h>>1,
581 s->dsp.draw_edges(s->current_picture.data[2],
582 s->current_picture.linesize[2], w>>1, h>>1,
591 if(USE_HALFPEL_PLANE && s->current_picture.data[0])
592 halfpel_interpol(s, s->halfpel_plane[0], &s->current_picture);
[all...]
h264_mvpred.h
239 int8_t *ref = s->current_picture.f.ref_index[0];
240 int16_t (*mv)[2] = s->current_picture.f.motion_val[0];
353 const int left_mb_field_flag = IS_INTERLACED(s->current_picture.f.mb_type[mb_xy - 1]);
370 topleft_xy += s->mb_stride & (((s->current_picture.f.mb_type[top_xy - 1] >> 7) & 1) - 1);
371 topright_xy += s->mb_stride & (((s->current_picture.f.mb_type[top_xy + 1] >> 7) & 1) - 1);
372 top_xy += s->mb_stride & (((s->current_picture.f.mb_type[top_xy ] >> 7) & 1) - 1);
392 h->topleft_type = s->current_picture.f.mb_type[topleft_xy];
393 h->top_type = s->current_picture.f.mb_type[top_xy];
394 h->topright_type = s->current_picture.f.mb_type[topright_xy];
395 h->left_type[LTOP] = s->current_picture
[all...]
dxva2_h264.c
48 const Picture *current_picture = s->current_picture_ptr; local
54 ff_dxva2_get_surface_index(ctx, current_picture),
123 current_picture->field_poc[0] != INT_MAX)
124 pp->CurrFieldOrderCnt[0] = current_picture->field_poc[0];
127 current_picture->field_poc[1] != INT_MAX)
128 pp->CurrFieldOrderCnt[1] = current_picture->field_poc[1];
282 const Picture *current_picture = h->s.current_picture_ptr; local
283 struct dxva2_picture_context *ctx_pic = current_picture->f.hwaccel_picture_private;
401 const Picture *current_picture = h->s.current_picture_ptr; local
402 struct dxva2_picture_context *ctx_pic = current_picture
[all...]
mpegvideo_xvmc.c
44 struct xvmc_pix_fmt *render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
76 struct xvmc_pix_fmt *last, *next, *render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
144 struct xvmc_pix_fmt *render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
182 s->current_picture.f.qscale_table[mb_xy] = s->qscale;
185 render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
svq1enc.c
48 AVFrame current_picture; member in struct:SVQ1Context
281 s->m.current_picture_ptr= &s->m.current_picture;
287 s->m.current_picture.f.linesize[0] = stride;
314 s->m.current_picture.mb_mean= (uint8_t *)s->dummy;
315 s->m.current_picture.mb_var= (uint16_t*)s->dummy;
316 s->m.current_picture.mc_mb_var= (uint16_t*)s->dummy;
317 s->m.current_picture.f.mb_type = s->dummy;
319 s->m.current_picture.f.motion_val[0] = s->motion_val8[plane] + 2;
514 if(!s->current_picture.data[0]){
515 avctx->get_buffer(avctx, &s->current_picture);
[all...]
h264_loopfilter.c
260 int mb_type = s->current_picture.f.mb_type[mb_xy];
261 int qp = s->current_picture.f.qscale_table[mb_xy];
262 int qp0 = s->current_picture.f.qscale_table[mb_xy - 1];
263 int qp1 = s->current_picture.f.qscale_table[h->top_mb_xy];
502 if (IS_INTRA(mb_type | s->current_picture.f.mb_type[mbn_xy])) {
505 if (!CABAC && IS_8x8DCT(s->current_picture.f.mb_type[mbn_xy])) {
520 qp = (s->current_picture.f.qscale_table[mb_xy] + s->current_picture.f.qscale_table[mbn_xy] + 1) >> 1;
524 chroma_qp_avg[0] = (h->chroma_qp[0] + get_chroma_qp(h, 0, s->current_picture.f.qscale_table[mbn_xy]) + 1) >> 1;
525 chroma_qp_avg[1] = (h->chroma_qp[1] + get_chroma_qp(h, 1, s->current_picture
[all...]
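
Editor's note: h264_loopfilter.c derives the QP used on an edge by averaging the qscale_table entries of the two macroblocks that meet there, using (a + b + 1) >> 1 so the result rounds to nearest rather than truncating. A tiny sketch of just that expression:

    #include <stdio.h>

    /* Round-to-nearest average of the two macroblocks sharing an edge,
     * as in the h264_loopfilter.c excerpt. */
    static int edge_qp(int qp_cur, int qp_neighbour)
    {
        return (qp_cur + qp_neighbour + 1) >> 1;
    }

    int main(void)
    {
        printf("%d\n", edge_qp(26, 27));  /* 27, not 26 */
        printf("%d\n", edge_qp(26, 28));  /* 27 */
        return 0;
    }
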
4xm.c
132 AVFrame current_picture, last_picture; member in struct:FourXContext
257 f->mv[i] = mv[i][0] + mv[i][1] *f->current_picture.linesize[0]/2;
259 f->mv[i] = (i&15) - 8 + ((i>>4)-8)*f->current_picture.linesize[0]/2;
380 uint16_t *dst= (uint16_t*)f->current_picture.data[0];
381 const int stride= f->current_picture.linesize[0]>>1;
481 int stride= f->current_picture.linesize[0]>>1;
483 uint16_t *dst = ((uint16_t*)f->current_picture.data[0]) + y * stride + x;
626 uint16_t *dst= (uint16_t*)f->current_picture.data[0];
627 const int stride= f->current_picture.linesize[0]>>1;
794 temp= f->current_picture;
[all...]
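
Editor's note: 4xm.c stores 16-bit pixels, so it halves the byte linesize to get a pixel stride before indexing into current_picture.data[0], and after each frame it swaps current_picture and last_picture (temp = f->current_picture; ...) so the just-decoded frame becomes the next reference. A stand-in sketch of both idioms; ToyFrame is illustrative, not AVFrame.

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in frame with a byte-based linesize, like AVFrame. */
    typedef struct ToyFrame {
        uint8_t *data[1];
        int      linesize[1];
    } ToyFrame;

    /* Convert the byte linesize to a uint16_t stride with >>1 before
     * indexing, as 4xm.c does. */
    static uint16_t *pixel_at(ToyFrame *f, int x, int y)
    {
        const int stride = f->linesize[0] >> 1;   /* bytes -> 16-bit units */
        return (uint16_t *)f->data[0] + y * stride + x;
    }

    int main(void)
    {
        static uint16_t cur_buf[32 * 8], last_buf[32 * 8];
        ToyFrame current_picture = { { (uint8_t *)cur_buf  }, { 32 * 2 } };
        ToyFrame last_picture    = { { (uint8_t *)last_buf }, { 32 * 2 } };

        *pixel_at(&current_picture, 3, 2) = 0x1234;
        printf("0x%04x\n", (unsigned)*pixel_at(&current_picture, 3, 2));

        /* Swap current and last pictures after decoding a frame. */
        ToyFrame temp   = current_picture;
        current_picture = last_picture;
        last_picture    = temp;
        return 0;
    }
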
h261dec.c
218 s->current_picture.f.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
326 s->current_picture.f.mb_type[xy] = MB_TYPE_INTRA;
333 s->current_picture.f.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
467 s->current_picture.f.pts = s->picture_number;
601 s->current_picture.f.pict_type = s->pict_type;
602 s->current_picture.f.key_frame = s->pict_type == AV_PICTURE_TYPE_I;
625 assert(s->current_picture.f.pict_type == s->current_picture_ptr->f.pict_type);
626 assert(s->current_picture.f.pict_type == s->pict_type);
dxva2_vc1.c
40 const Picture *current_picture = s->current_picture_ptr; local
44 pp->wDeblockedPictureIndex = ff_dxva2_get_surface_index(ctx, current_picture);
233 const Picture *current_picture = v->s.current_picture_ptr; local
234 struct dxva2_picture_context *ctx_pic = current_picture->f.hwaccel_picture_private;
svq3.c
290 dest = s->current_picture.f.data[0] + x + y*s->linesize;
311 dest = s->current_picture.f.data[i] + (x >> 1) + (y >> 1) * s->uvlinesize;
427 fill_rectangle(s->current_picture.f.motion_val[dir][b_xy],
490 AV_COPY32(h->mv_cache[m][scan8[0] - 1 + i*8], s->current_picture.f.motion_val[m][b_xy - 1 + i*h->b_stride]);
498 memcpy(h->mv_cache[m][scan8[0] - 1*8], s->current_picture.f.motion_val[m][b_xy - h->b_stride], 4*2*sizeof(int16_t));
502 AV_COPY32(h->mv_cache[m][scan8[0] + 4 - 1*8], s->current_picture.f.motion_val[m][b_xy - h->b_stride + 4]);
509 AV_COPY32(h->mv_cache[m][scan8[0] - 1 - 1*8], s->current_picture.f.motion_val[m][b_xy - h->b_stride - 1]);
530 memset(s->current_picture.f.motion_val[0][b_xy + i*h->b_stride], 0, 4*2*sizeof(int16_t));
538 memset(s->current_picture.f.motion_val[1][b_xy + i*h->b_stride], 0, 4*2*sizeof(int16_t));
625 memset(s->current_picture
[all...]
intrax8.c
307 s->current_picture.f.linesize[chroma>0],
616 s->dest[chroma], s->current_picture.f.linesize[!!chroma]);
640 dsp_x8_put_solidcolor(w->predicted_dc, s->dest[chroma], s->current_picture.f.linesize[!!chroma]);
644 s->current_picture.f.linesize[!!chroma] );
648 s->current_picture.f.linesize[!!chroma],
659 int linesize = s->current_picture.f.linesize[!!chroma];
674 const int linesize = s->current_picture.f.linesize[0];
675 const int uvlinesize = s->current_picture.f.linesize[1];
677 s->dest[0] = s->current_picture.f.data[0];
678 s->dest[1] = s->current_picture
[all...]

Completed in 159 milliseconds
