Lines Matching defs:src_reloc

2891 struct radeon_cs_reloc *src_reloc, *dst_reloc, *dst2_reloc;
2940 r = r600_dma_cs_next_reloc(p, &src_reloc);
2971 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
2973 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
2988 ib[idx+8] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
2989 ib[idx+9] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3001 ib[idx+1] += (u32)(src_reloc->lobj.gpu_offset >> 8);
3007 ib[idx+7] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3008 ib[idx+8] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3031 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3033 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3048 ib[idx+8] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3049 ib[idx+9] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3059 ib[idx+1] += (u32)(src_reloc->lobj.gpu_offset >> 8);
3069 ib[idx+7] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3070 ib[idx+8] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3076 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3078 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3094 ib[idx+1] += (u32)(src_reloc->lobj.gpu_offset >> 8);
3115 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3117 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3132 ib[idx+8] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3133 ib[idx+9] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3148 ib[idx+1] += (u32)(src_reloc->lobj.gpu_offset >> 8);
3158 ib[idx+7] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3159 ib[idx+8] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3165 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3167 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3191 if ((src_offset + count) > radeon_bo_size(src_reloc->robj)) {
3193 (uintmax_t)src_offset + count, radeon_bo_size(src_reloc->robj));
3202 ib[idx+2] += (u32)(src_reloc->lobj.gpu_offset & 0xffffffff);
3204 ib[idx+4] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3213 ib[idx+1] += (u32)(src_reloc->lobj.gpu_offset & 0xffffffff);
3214 ib[idx+2] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3233 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3235 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3250 ib[idx+3] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3253 ib[idx+6] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
3266 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3268 (uintmax_t)src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3277 ib[idx+2] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
3279 ib[idx+4] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
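These matches appear to come from the radeon DMA command-stream checker (evergreen_dma_cs_parse or a close relative), and they all repeat one pattern: fetch the source relocation, bounds-check the requested transfer against the backing buffer object, then patch the indirect-buffer (IB) dwords with the low 32 bits and the upper 8 bits of the relocated GPU address. The following is a minimal, self-contained sketch of that pattern, not the driver code itself; the stand-in types (radeon_bo, radeon_cs_reloc), the helper name patch_src_reloc, and the dword indices are assumptions for illustration.

    /*
     * Sketch of the recurring src_reloc handling seen in the matches above.
     * Simplified stand-ins replace the real kernel types and helpers.
     */
    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t u32;
    typedef uint64_t u64;

    #define upper_32_bits(x) ((u32)((u64)(x) >> 32))

    struct radeon_bo { u64 size; };                  /* stand-in */
    struct radeon_cs_reloc {
            struct radeon_bo *robj;
            struct { u64 gpu_offset; } lobj;         /* stand-in for lobj */
    };

    static u64 radeon_bo_size(struct radeon_bo *bo)  /* stand-in */
    {
            return bo->size;
    }

    /*
     * Patch one linear copy packet: reject the packet if the transfer runs
     * past the end of the source BO, otherwise add the low 32 bits of the
     * GPU address (dword-aligned) into ib[idx+lo_dw] and the upper 8 bits
     * into ib[idx+hi_dw], mirroring e.g. the ib[idx+7]/ib[idx+8] pairs above.
     */
    static int patch_src_reloc(u32 *ib, unsigned idx,
                               unsigned lo_dw, unsigned hi_dw,
                               u64 src_offset, u32 count,
                               struct radeon_cs_reloc *src_reloc)
    {
            if ((src_offset + ((u64)count * 4)) > radeon_bo_size(src_reloc->robj)) {
                    fprintf(stderr, "DMA copy src too big: %ju > %ju\n",
                            (uintmax_t)(src_offset + ((u64)count * 4)),
                            (uintmax_t)radeon_bo_size(src_reloc->robj));
                    return -1;
            }
            ib[idx + lo_dw] += (u32)(src_reloc->lobj.gpu_offset & 0xfffffffc);
            ib[idx + hi_dw] += upper_32_bits(src_reloc->lobj.gpu_offset) & 0xff;
            return 0;
    }

The 0xfffffffc mask keeps the low dword of a linear address dword-aligned. The ">> 8" lines in the matches are the tiled variants, which appear to program a 256-byte-aligned base address in a single dword instead, and the byte-aligned copy variant (checked as src_offset + count, masked with 0xffffffff) skips the alignment mask because it is not dword-granular.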