Lines matching refs: offset (each macro below takes offset as an argument; only the lines that reference it are listed, so the SMOVE_* bodies appear with gaps, marked /* ... */).

/* Forward copy of 32 bytes: four doubleword loads into even/odd register
   pairs, written back as eight word stores, so the destination only needs
   to be word aligned. */
#define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd [%src + (offset) + 0x00], %t0; \
	ldd [%src + (offset) + 0x08], %t2; \
	ldd [%src + (offset) + 0x10], %t4; \
	ldd [%src + (offset) + 0x18], %t6; \
	st %t0, [%dst + (offset) + 0x00]; \
	st %t1, [%dst + (offset) + 0x04]; \
	st %t2, [%dst + (offset) + 0x08]; \
	st %t3, [%dst + (offset) + 0x0c]; \
	st %t4, [%dst + (offset) + 0x10]; \
	st %t5, [%dst + (offset) + 0x14]; \
	st %t6, [%dst + (offset) + 0x18]; \
	st %t7, [%dst + (offset) + 0x1c];
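
A hedged usage sketch, not part of this listing: the register choice (%o1 = src, %o0 = dst, o2-o5/g2-g5 as temporaries) and the doubleword-aligned source are assumptions for illustration. An unrolled forward-copy pass would expand several of these macros back to back, for example 0x80 bytes per iteration:

	MOVE_BIGCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)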

/* Same 32-byte forward copy, but written back with doubleword stores;
   used when the destination is doubleword aligned as well. */
#define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd [%src + (offset) + 0x00], %t0; \
	ldd [%src + (offset) + 0x08], %t2; \
	ldd [%src + (offset) + 0x10], %t4; \
	ldd [%src + (offset) + 0x18], %t6; \
	std %t0, [%dst + (offset) + 0x00]; \
	std %t2, [%dst + (offset) + 0x08]; \
	std %t4, [%dst + (offset) + 0x10]; \
	std %t6, [%dst + (offset) + 0x18];
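
/* Note: the *ALIGNCHUNK variants still take eight temporaries but only
   name t0/t2/t4/t6 in their bodies, because ldd/std transfer an even/odd
   register pair: each even temporary implicitly carries its odd partner
   (t1, t3, t5, t7), which the word-store variants spell out explicitly. */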

/* Trailing 16-byte chunk of a forward copy, addressed backward from
   src/dst: two doubleword loads, four word stores. */
#define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \
	ldd [%src - (offset) - 0x10], %t0; \
	ldd [%src - (offset) - 0x08], %t2; \
	st %t0, [%dst - (offset) - 0x10]; \
	st %t1, [%dst - (offset) - 0x0c]; \
	st %t2, [%dst - (offset) - 0x08]; \
	st %t3, [%dst - (offset) - 0x04];

/* As above, but with doubleword stores for a doubleword-aligned destination. */
#define MOVE_LASTALIGNCHUNK(src, dst, offset, t0, t1, t2, t3) \
	ldd [%src - (offset) - 0x10], %t0; \
	ldd [%src - (offset) - 0x08], %t2; \
	std %t0, [%dst - (offset) - 0x10]; \
	std %t2, [%dst - (offset) - 0x08];

/* Two trailing bytes, copied with byte loads/stores and addressed
   backward from src/dst. */
#define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \
	ldub [%src - (offset) - 0x02], %t0; \
	ldub [%src - (offset) - 0x01], %t1; \
	stb %t0, [%dst - (offset) - 0x02]; \
	stb %t1, [%dst - (offset) - 0x01];
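
A hedged sketch of how the tail macros compose (register names are illustrative, and %o1/%o0 are assumed to have been advanced to the first byte past the end of a doubleword-aligned region): the last 32 bytes could be finished with

	MOVE_LASTCHUNK(o1, o0, 0x10, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x00, g2, g3, g4, g5)

and a leftover pair of bytes would be picked up in the same backward-addressed style by MOVE_SHORTCHUNK.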

/* 32-byte chunk of a reverse (descending-address) copy: src/dst address
   the bytes below them; word stores, so the destination only needs to be
   word aligned. */
#define RMOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd [%src - (offset) - 0x20], %t0; \
	ldd [%src - (offset) - 0x18], %t2; \
	ldd [%src - (offset) - 0x10], %t4; \
	ldd [%src - (offset) - 0x08], %t6; \
	st %t0, [%dst - (offset) - 0x20]; \
	st %t1, [%dst - (offset) - 0x1c]; \
	st %t2, [%dst - (offset) - 0x18]; \
	st %t3, [%dst - (offset) - 0x14]; \
	st %t4, [%dst - (offset) - 0x10]; \
	st %t5, [%dst - (offset) - 0x0c]; \
	st %t6, [%dst - (offset) - 0x08]; \
	st %t7, [%dst - (offset) - 0x04];

/* Reverse 32-byte chunk with doubleword stores, for a doubleword-aligned
   destination. */
#define RMOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd [%src - (offset) - 0x20], %t0; \
	ldd [%src - (offset) - 0x18], %t2; \
	ldd [%src - (offset) - 0x10], %t4; \
	ldd [%src - (offset) - 0x08], %t6; \
	std %t0, [%dst - (offset) - 0x20]; \
	std %t2, [%dst - (offset) - 0x18]; \
	std %t4, [%dst - (offset) - 0x10]; \
	std %t6, [%dst - (offset) - 0x08];

/* Final 16 bytes of the reverse copy, addressed forward from src/dst:
   two doubleword loads, four word stores. */
#define RMOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \
	ldd [%src + (offset) + 0x00], %t0; \
	ldd [%src + (offset) + 0x08], %t2; \
	st %t0, [%dst + (offset) + 0x00]; \
	st %t1, [%dst + (offset) + 0x04]; \
	st %t2, [%dst + (offset) + 0x08]; \
	st %t3, [%dst + (offset) + 0x0c];

/* Two bytes of the reverse copy, byte loads/stores at ascending offsets
   from src/dst. */
#define RMOVE_SHORTCHUNK(src, dst, offset, t0, t1) \
	ldub [%src + (offset) + 0x00], %t0; \
	ldub [%src + (offset) + 0x01], %t1; \
	stb %t0, [%dst + (offset) + 0x00]; \
	stb %t1, [%dst + (offset) + 0x01];
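
A hedged sketch of the descending direction (again with illustrative registers; %o1/%o0 are assumed to point just past the end of the source and destination, both doubleword aligned): a reverse pass of 0x40 bytes would expand

	RMOVE_BIGALIGNCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
	RMOVE_BIGALIGNCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)

after which the pointers would be dropped by 0x40 before the next pass.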

/* Shifted copy for mismatched src/dst alignment: the doublewords loaded at
   src + offset are evidently shifted (shil/shir) and merged with the carried
   value in prev before being stored at dst + offset + offset2. */
#define SMOVE_CHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, prev, shil, shir, offset2) \
	ldd [%src + (offset) + 0x00], %t0; \
	ldd [%src + (offset) + 0x08], %t2; \
	/* ... */ \
	std %t4, [%dst + (offset) + (offset2) - 0x04]; \
	std %t0, [%dst + (offset) + (offset2) + 0x04]; \
	/* ... */

/* Companion shifted-copy chunk whose stores land at dst + offset + offset2
   + 0x00 and + 0x08; its intermediate lines are likewise elided here. */
#define SMOVE_ALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, prev, shil, shir, offset2) \
	ldd [%src + (offset) + 0x00], %t0; \
	ldd [%src + (offset) + 0x08], %t2; \
	/* ... */ \
	std %t0, [%dst + (offset) + (offset2) + 0x00]; \
	std %t2, [%dst + (offset) + (offset2) + 0x08];