#include <linux/linkage.h>
#include "sysdep.h"

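/*
 * void *memmove(void *dest, const void *src, size_t n)
 *
 * Arguments arrive in r0 (dest), r1 (src) and r2 (n); dest is returned in r0.
 * When dest - src >= n a plain forward copy cannot clobber unread source
 * bytes, so the work is handed to memcpy.  Otherwise the regions may overlap
 * with dest above src, and the data is copied backwards from the end.
 */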
        .weak memmove
ENTRY(__memmove)
ENTRY(memmove)
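        /* r3 = dest - src; if (unsigned)(dest - src) >= n, a forward copy is
           safe, so tail-call memcpy. */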
        subu    r3, r0, r1
        cmphs   r3, r2
        bt      memcpy

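        /* Overlapping copy: remember dest for the return value and move both
           pointers past the end so the data can be copied backwards. */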
        mov     r12, r0
        addu    r0, r0, r2
        addu    r1, r1, r2

        /* Fewer than 4 bytes: copy byte by byte. */
        cmplti  r2, 4
        bt      .L_copy_by_byte

        andi    r13, r0, 3
        /* r13 = r0 & 3: branch out if the destination is not word aligned. */
        bnez    r13, .L_dest_not_aligned
        /* The destination is word aligned here; the source may not be, and the
           word loads below rely on hardware unaligned access. */
.L_dest_aligned:
        /* r18 = number of 16-byte blocks (len >> 4). */
        zext    r18, r2, 31, 4
        /* Fewer than 16 bytes left: skip the main loop. */
        bez     r18, .L_len_less_16bytes
        movi    r19, 0

        /* Main loop: copy 16 bytes per iteration, walking backwards. */
        LABLE_ALIGN
.L_len_larger_16bytes:
        subi    r1, 16
        subi    r0, 16
#if defined(__CK860__)
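        /* CK860: copy the four words from the highest offset down, reusing r3
           for each load/store pair. */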
        ldw     r3, (r1, 12)
        stw     r3, (r0, 12)
        ldw     r3, (r1, 8)
        stw     r3, (r0, 8)
        ldw     r3, (r1, 4)
        stw     r3, (r0, 4)
        ldw     r3, (r1, 0)
        stw     r3, (r0, 0)
#else
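        /* Generic: load all four words into r20-r23 first, then store them,
           so loads are not interleaved with stores. */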
        ldw     r20, (r1, 0)
        ldw     r21, (r1, 4)
        ldw     r22, (r1, 8)
        ldw     r23, (r1, 12)
        stw     r20, (r0, 0)
        stw     r21, (r0, 4)
        stw     r22, (r0, 8)
        stw     r23, (r0, 12)
        PRE_BNEZAD (r18)
#endif
        BNEZAD (r18, .L_len_larger_16bytes)

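        /* Copy any remaining whole words (bits 3:2 of len), still backwards. */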
.L_len_less_16bytes:
        zext    r18, r2, 3, 2
        bez     r18, .L_copy_by_byte
.L_len_less_16bytes_loop:
        subi    r1, 4
        subi    r0, 4
        ldw     r3, (r1, 0)
        PRE_BNEZAD (r18)
        stw     r3, (r0, 0)
        BNEZAD (r18, .L_len_less_16bytes_loop)

        /* Copy the remaining 0-3 bytes (bits 1:0 of len). */
.L_copy_by_byte:
        zext    r18, r2, 1, 0
        bez     r18, .L_return
.L_copy_by_byte_loop:
        subi    r1, 1
        subi    r0, 1
        ldb     r3, (r1, 0)
        PRE_BNEZAD (r18)
        stb     r3, (r0, 0)
        BNEZAD (r18, .L_copy_by_byte_loop)

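        /* Restore the saved dest pointer as the return value. */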
.L_return:
        mov     r0, r12
        rts

        /* The destination pointer is not word aligned: copy r13 = r0 & 3
           bytes one at a time until it is, then rejoin the aligned path. */
.L_dest_not_aligned:
        sub     r2, r13
.L_dest_not_aligned_loop:
        subi    r1, 1
        subi    r0, 1
        /* Copy one byte; r13 counts down to alignment. */
        ldb     r3, (r1, 0)
        PRE_BNEZAD (r13)
        stb     r3, (r0, 0)
        BNEZAD (r13, .L_dest_not_aligned_loop)
        cmplti  r2, 4
        bt      .L_copy_by_byte
        /* At least 4 bytes remain: continue on the word-aligned path. */
        jbr     .L_dest_aligned
ENDPROC(memmove)
ENDPROC(__memmove)