Lines matching refs: o3
94 andcc %o1, 0xffffff80, %o3
101 andcc %o1, 0xffffff80, %o3
111 andcc %o1, 0xffffff80, %o3 ! num loop iterations
119 subcc %o3, 128, %o3 ! detract from loop iters
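The andcc hits at file lines 94-111 mask the length down to a multiple of 128 (0xffffff80) to size the unrolled copy loop, and the subcc at file line 119 retires one 128-byte chunk per pass. A minimal C sketch of that split, with illustrative names that are not in the source:

    /* Hypothetical sketch: len & 0xffffff80 gives the bytes handled
     * by the unrolled loop; 128 is peeled off per iteration. */
    static unsigned int bulk_iterations(unsigned int len)
    {
            unsigned int bulk = len & 0xffffff80; /* andcc %o1, 0xffffff80, %o3 */
            unsigned int iters = 0;

            while (bulk != 0) {
                    bulk -= 128;                  /* subcc %o3, 128, %o3 */
                    iters++;
            }
            return iters;
    }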
155 a, b, %o3; \
251 andcc %o3, 4, %g0
252 EX(ldd [%o0 + 0x00], %g2, and %o3, 0xf)
259 andcc %o3, 4, %g0
262 andcc %o3, 3, %o3
263 EX(ld [%o0 + 0x00], %g2, add %o3, 4)
268 andcc %o3, 3, %g0
271 addcc %o3, -1, %g0
273 subcc %o3, 2, %o3
276 2: EX(lduh [%o0 + 0x00], %o4, add %o3, 2)
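File lines 251-276 peel the residual byte count in %o3 off with progressively smaller accesses, each gated by one bit of the remainder. A simplified C sketch of that structure, assuming rem holds the leftover count; the real code also folds every piece into the running checksum and begins with an 8-byte ldd:

    #include <string.h>

    /* Sketch only: checksum accumulation and the EX() fault
     * annotations are omitted. */
    static void copy_tail(const unsigned char *src, unsigned char *dst,
                          unsigned int rem)
    {
            if (rem & 4) { memcpy(dst, src, 4); src += 4; dst += 4; }
            if (rem & 2) { memcpy(dst, src, 2); src += 2; dst += 2; }
            if (rem & 1)
                    *dst = *src;
    }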
294 srl %g1, 1, %o3
295 2: cmp %o3, 0
297 andcc %g1, 0xf, %o3
298 andcc %o3, %o0, %g0 ! Check %o0 only (%o1 has the same last 2 bits)
300 srl %o3, 1, %o3
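The comment at file line 298 relies on src and dst having identical low bits at this point, so one pointer's alignment decides for both. A hedged C sketch of that reasoning (function name is illustrative, not from the source):

    /* Valid only under the invariant the comment states:
     * ((unsigned long)src ^ (unsigned long)dst) has clear low bits. */
    static int both_word_aligned(const void *src, const void *dst)
    {
            (void)dst; /* dst shares the same last two bits as src */
            return ((unsigned long)src & 3) == 0;
    }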
349 5: CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
350 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
351 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
352 CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
362 andcc %g1, 0xf, %o3 ! get low bits of length (clears carry btw)
380 andcc %o3, 0xf, %g0 ! check for low bits set
382 andcc %o3, 8, %g0 ! begin checks for that code
385 ccdbl: CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
386 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
387 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
388 CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
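Each CSUMCOPY_BIGCHUNK/CSUMCOPY_BIGCHUNK_ALIGNED invocation above advances the offset by 0x20, so the four back-to-back calls at file lines 349-352 and 385-388 cover one 128-byte iteration of the bulk loop. A rough C sketch of that shape, assuming word-aligned pointers; the end-around carry fold stands in for the addxcc chains inside the macros:

    #include <stdint.h>
    #include <string.h>

    static uint32_t csum_copy_bulk(const uint8_t *src, uint8_t *dst,
                                   uint32_t bulk, uint32_t sum)
    {
            while (bulk) {
                    /* four 0x20-byte macro bodies == one 128-byte pass */
                    for (unsigned off = 0; off < 128; off += 4) {
                            uint32_t w;
                            uint64_t t;

                            memcpy(&w, src + off, 4);
                            memcpy(dst + off, &w, 4);
                            t = (uint64_t)sum + w;
                            sum = (uint32_t)t + (uint32_t)(t >> 32);
                    }
                    src += 128;
                    dst += 128;
                    bulk -= 128;
            }
            return sum;
    }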
497 sub %g1, %o2, %o3
502 andcc %g2, 15, %o3
506 add %o3, 1, %o3
507 and %o3, 14, %o3
508 sll %o3, 3, %o2
511 sub %g1, %g2, %o3
513 sub %o3, %o2, %o3
525 sub %o3, 16, %o3
530 add %o3, 0x70, %o3
537 sub %o3, %o2, %o3
541 add %g1, %g4, %o3
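The arithmetic on %o3 at file lines 497-541 appears to run in the fault-fixup tail of the routine: from the original length and the progress markers it derives how many bytes were left uncopied when the fault hit. The essence, as a hedged C sketch (names illustrative; the real code distinguishes several fault sites and alignment cases):

    /* e.g. the sub %g1, %o2, %o3 at file line 497 */
    static unsigned int bytes_left(unsigned int total_len,
                                   unsigned int bytes_done)
    {
            return total_len - bytes_done;
    }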