Lines matching refs:edx in /macosx-10.5.8/xnu-1228.15.4/osfmk/i386/commpage/

 53     movl    %edi,%edx
 54     subl    %esi,%edx           // (dest - source)
 55     cmpl    %ecx,%edx           // must move in reverse if (dest - source) < length
 78     movl    %edi,%edx
 79     subl    %esi,%edx           // (dest - source)
 80     cmpl    %ecx,%edx           // must move in reverse if (dest - source) < length
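
Lines 53-55 (repeated at 78-80 for a second entry point) are the classic memmove direction test: compute (dest - source) as an unsigned value and compare it against the length. One unsigned compare covers both cases, because a destination below the source wraps to a huge unsigned difference that can never be less than the length. A minimal C sketch of the same predicate (the function name is mine, not from the source):

    #include <stdint.h>

    // Nonzero if an overlapping copy of `len` bytes from `src` to `dst`
    // must run back-to-front.  The unsigned wraparound of dst - src folds
    // "dst < src" (huge difference, never < len) and "dst inside
    // [src, src+len)" into one comparison, mirroring the subl/cmpl pair
    // at lines 54-55.
    static int must_copy_reverse(const char *dst, const char *src,
                                 uintptr_t len) {
        return (uintptr_t)(dst - src) < len;
    }
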
 91     movl    %ecx,%edx           // copy length
102     andl    $3,%edx             // any leftover bytes?
109     dec     %edx
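
Lines 91-109 belong to the short forward copy: the length is copied into %edx, `andl $3` isolates the 0-3 bytes left over after whole words are moved, and `dec %edx` counts down the byte loop. A hedged C rendering (the word-copy lines between these fragments are not in this listing, so their shape is assumed):

    // Sketch of the short forward copier implied by lines 91-109: move
    // whole 32-bit words, then the (len & 3) leftover bytes one at a
    // time.  The direct word dereference assumes i386-style tolerance
    // of unaligned access, as the original code does.
    static void copy_short_forward(char *dst, const char *src,
                                   unsigned len) {
        unsigned words = len >> 2;  // whole words (not shown in listing)
        unsigned tail  = len & 3;   // leftover bytes (andl $3,%edx, line 102)
        while (words--) {
            *(unsigned *)dst = *(const unsigned *)src;
            dst += 4; src += 4;
        }
        while (tail--)              // byte loop counted down by dec %edx
            *dst++ = *src++;
    }
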
130     movl    %edi,%edx           // copy destination
131     negl    %edx
132     andl    $15,%edx            // get #bytes to align destination
134     subl    %edx,%ecx           // decrement length
140     dec     %edx
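
Lines 130-140 align the destination for the 16-byte SSE stores: negating the address and masking with 15 yields exactly the byte count to the next 16-byte boundary (zero if already aligned); those bytes are moved by the `dec`-counted loop and subtracted from the length. The same computation in C (helper name is illustrative):

    #include <stdint.h>

    // Bytes needed to advance `p` to the next 16-byte boundary,
    // matching negl %edx / andl $15,%edx at lines 131-132.
    static unsigned bytes_to_align16(const void *p) {
        return (unsigned)(-(uintptr_t)p & 15);
    }
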
148     // edx = -(length to move), a multiple of 64
153     movl    %ecx,%edx           // copy length
155     andl    $-64,%edx           // get number of bytes we will copy in inner loop
156     addl    %edx,%esi           // point to 1st byte not copied
157     addl    %edx,%edi
158     negl    %edx                // now generate offset to 1st byte to be copied
168     movdqa  (%esi,%edx),%xmm0
169     movdqa  16(%esi,%edx),%xmm1
170     movdqa  32(%esi,%edx),%xmm2
171     movdqa  48(%esi,%edx),%xmm3
173     movdqa  %xmm0,(%edi,%edx)
174     movdqa  %xmm1,16(%edi,%edx)
175     movdqa  %xmm2,32(%edi,%edx)
176     movdqa  %xmm3,48(%edi,%edx)
178     addl    $64,%edx
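
Lines 148-178 form the aligned forward inner loop. The setup biases both pointers past the chunk and negates the chunk size, so the loop runs a negative index up toward zero and the closing `addl $64,%edx` sets the flags for the loop branch with no separate compare. A C sketch with SSE2 intrinsics, assuming (as this path requires) that both pointers are 16-byte aligned and the length is a multiple of 64; the function name is mine:

    #include <emmintrin.h>
    #include <stddef.h>

    // Sketch of lines 168-178: 64 bytes per iteration, index climbing
    // from -len to zero (negl %edx / addl $64,%edx).
    static void copy64_forward_aligned(char *dst, const char *src,
                                       size_t len) {
        src += len;                  // point to 1st byte not copied
        dst += len;
        for (ptrdiff_t i = -(ptrdiff_t)len; i != 0; i += 64) {
            __m128i a = _mm_load_si128((const __m128i *)(src + i));
            __m128i b = _mm_load_si128((const __m128i *)(src + i + 16));
            __m128i c = _mm_load_si128((const __m128i *)(src + i + 32));
            __m128i d = _mm_load_si128((const __m128i *)(src + i + 48));
            _mm_store_si128((__m128i *)(dst + i),      a);
            _mm_store_si128((__m128i *)(dst + i + 16), b);
            _mm_store_si128((__m128i *)(dst + i + 32), c);
            _mm_store_si128((__m128i *)(dst + i + 48), d);
        }
    }
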
188     movdqu  (%esi,%edx),%xmm0
189     movdqu  16(%esi,%edx),%xmm1
190     movdqu  32(%esi,%edx),%xmm2
191     movdqu  48(%esi,%edx),%xmm3
193     movdqa  %xmm0,(%edi,%edx)
194     movdqa  %xmm1,16(%edi,%edx)
195     movdqa  %xmm2,32(%edi,%edx)
196     movdqa  %xmm3,48(%edi,%edx)
198     addl    $64,%edx
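
Lines 188-198 are the same loop body for a source that could not be aligned: the loads become movdqu while the stores stay movdqa, since the destination was aligned up front. In intrinsic form only the loads change (a hedged variant under the same assumptions, minus source alignment):

    #include <emmintrin.h>
    #include <stddef.h>

    // Variant of the sketch above for an unaligned source
    // (lines 188-198): unaligned loads, aligned stores.
    static void copy64_forward_unaligned_src(char *dst, const char *src,
                                             size_t len) {
        src += len;
        dst += len;
        for (ptrdiff_t i = -(ptrdiff_t)len; i != 0; i += 64) {
            __m128i a = _mm_loadu_si128((const __m128i *)(src + i));
            __m128i b = _mm_loadu_si128((const __m128i *)(src + i + 16));
            __m128i c = _mm_loadu_si128((const __m128i *)(src + i + 32));
            __m128i d = _mm_loadu_si128((const __m128i *)(src + i + 48));
            _mm_store_si128((__m128i *)(dst + i),      a);
            _mm_store_si128((__m128i *)(dst + i + 16), b);
            _mm_store_si128((__m128i *)(dst + i + 32), c);
            _mm_store_si128((__m128i *)(dst + i + 48), d);
        }
    }
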
221     movl    %ecx,%edx           // copy length
232     andl    $3,%edx             // any leftover bytes?
239     dec     %edx
254     movl    %edi,%edx           // copy destination
255     andl    $15,%edx            // get #bytes to align destination
257     subl    %edx,%ecx           // adjust length
263     dec     %edx
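
Lines 221-263 are the prologue of the reverse (descending) copy. Note that line 255 masks the destination with 15 without the negation used at lines 131-132: in the reverse path the registers point past the end of the buffers, so the low four bits of the end address are directly the bytes hanging above the last 16-byte boundary. A one-line C sketch of that reading (my inference from the visible fragments):

    #include <stdint.h>

    // Bytes above the last 16-byte boundary when `end` points one past
    // the region, matching andl $15,%edx at line 255.
    static unsigned tail_bytes_above_align16(const void *end) {
        return (unsigned)((uintptr_t)end & 15);
    }
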
269     movl    %ecx,%edx           // copy length
271     andl    $-64,%edx           // get number of bytes we will copy in inner loop
272     subl    %edx,%esi           // point to endpoint of copy
273     subl    %edx,%edi
278     movdqa  -16(%esi,%edx),%xmm0
279     movdqa  -32(%esi,%edx),%xmm1
280     movdqa  -48(%esi,%edx),%xmm2
281     movdqa  -64(%esi,%edx),%xmm3
283     movdqa  %xmm0,-16(%edi,%edx)
284     movdqa  %xmm1,-32(%edi,%edx)
285     movdqa  %xmm2,-48(%edi,%edx)
286     movdqa  %xmm3,-64(%edi,%edx)
288     subl    $64,%edx
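
Lines 269-288 mirror the forward inner loop in descending order: the chunk size is subtracted from the end pointers, and a positive index counts down to zero while the displacements -16 through -64 walk each 64-byte block from the top. A C sketch under the same assumptions as the forward one (end pointers 16-byte aligned, length a multiple of 64; function name is mine):

    #include <emmintrin.h>
    #include <stddef.h>

    // Sketch of lines 278-288: `src_end`/`dst_end` point one past the
    // last byte; the index counts down from len to zero (subl $64,%edx).
    static void copy64_reverse_aligned(char *dst_end, const char *src_end,
                                       size_t len) {
        const char *src = src_end - len;   // low endpoint of the chunk
        char *dst = dst_end - len;
        for (ptrdiff_t i = (ptrdiff_t)len; i != 0; i -= 64) {
            __m128i a = _mm_load_si128((const __m128i *)(src + i - 16));
            __m128i b = _mm_load_si128((const __m128i *)(src + i - 32));
            __m128i c = _mm_load_si128((const __m128i *)(src + i - 48));
            __m128i d = _mm_load_si128((const __m128i *)(src + i - 64));
            _mm_store_si128((__m128i *)(dst + i - 16), a);
            _mm_store_si128((__m128i *)(dst + i - 32), b);
            _mm_store_si128((__m128i *)(dst + i - 48), c);
            _mm_store_si128((__m128i *)(dst + i - 64), d);
        }
    }
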
297     movdqu  -16(%esi,%edx),%xmm0
298     movdqu  -32(%esi,%edx),%xmm1
299     movdqu  -48(%esi,%edx),%xmm2
300     movdqu  -64(%esi,%edx),%xmm3
302     movdqa  %xmm0,-16(%edi,%edx)
303     movdqa  %xmm1,-32(%edi,%edx)
304     movdqa  %xmm2,-48(%edi,%edx)
305     movdqa  %xmm3,-64(%edi,%edx)
307     subl    $64,%edx
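
Lines 297-307 complete the picture: the reverse loop for a misaligned source, again swapping movdqa loads for movdqu while keeping the aligned stores. In the sketch above that is the same substitution of _mm_loadu_si128 for _mm_load_si128 made in the forward case.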