1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
|
#if defined __thumb__
/* In Thumb mode the ARM-mode assembly below cannot be assembled, so
   fall back to the generic C implementation of memcpy.  */
#include "../../string/memcpy.c"
#else
#include <string.h>
#include "xscale.h"
/*
 * memcpy - XScale-tuned copy of LEN bytes from SRC0 to DST0.
 *
 * Returns DST0 (the standard memcpy contract).
 *
 * Strategy of the asm below: if both pointers have the same low-2-bit
 * alignment, copy single bytes until the source is word-aligned, then
 * move data in 16-byte, 8-byte and 4-byte chunks via ldmia/stmia,
 * issuing PRELOADSTR hints on the source pointer; any remaining (or
 * unalignable) bytes are copied one at a time at label 3.
 *
 * NOTE(review): the asm body uses multi-line string literals, an
 * extension accepted only by pre-3.0 GCC -- this file will not build
 * with a modern compiler as written.  Confirm the intended toolchain
 * before modernizing.
 */
void *
memcpy (void *dst0, const void *src0, size_t len)
{
/* Sink for the destination-cursor output operand: %0 is seeded from
   dst0 (matching constraint "0") and advanced by the asm, and its
   final value is discarded into `dummy' so dst0 itself stays intact
   for the return.  %1 = src cursor, %2 = remaining byte count.  */
int dummy;
asm volatile (
/* Fast path (skipped when optimizing for size): alignment fix-up
   followed by the 16-/8-/4-byte block-copy loops.  */
#ifndef __OPTIMIZE_SIZE__
"cmp %2, #0x3
bls 3f
and lr, %1, #0x3
and r3, %0, #0x3
cmp lr, r3
bne 3f
cmp lr, #0x0
beq 2f
b 1f
0:
ldrb r3, [%1], #1
"
/* PRELOADSTR is defined in xscale.h; presumably it emits a cache
   preload hint for the source pointer -- verify against that header.  */
PRELOADSTR ("%1")
"
tst %1, #0x3
strb r3, [%0], #1
beq 3f
1:
sub %2, %2, #1
cmn %2, #1
bne 0b
2:
cmp %2, #0xf
bls 1f
0:
ldmia %1!, { r3, r4, r5, lr }
"
PRELOADSTR ("%1")
"
sub %2, %2, #16
cmp %2, #0xf
stmia %0!, { r3, r4, r5, lr }
bhi 0b
1:
cmp %2, #0x7
bls 1f
0:
ldmia %1!, { r3, r4 }
"
PRELOADSTR ("%1")
"
sub %2, %2, #8
cmp %2, #0x7
stmia %0!, { r3, r4 }
bhi 0b
1:
cmp %2, #0x3
bls 3f
0:
sub %2, %2, #4
ldr r3, [%1], #4
"
PRELOADSTR ("%1")
"
cmp %2, #0x3
str r3, [%0], #4
bhi 0b
"
#endif /* !__OPTIMIZE_SIZE__ */
/* Slow path / tail: copy the remaining %2 bytes one at a time.  This
   is the whole copy when size-optimizing or when the two pointers
   cannot be brought to a common word alignment.  */
"
3:
"
PRELOADSTR ("%1")
"
sub %2, %2, #1
cmn %2, #1
beq 1f
0:
sub %2, %2, #1
ldrb r3, [%1], #1
"
PRELOADSTR ("%1")
"
cmn %2, #1
strb r3, [%0], #1
bne 0b
1:"
/* Outputs: the three cursors are read-write registers; src0/len are
   deliberately clobbered (their C values are dead after the call).
   "memory" is required since the asm writes through %0.  */
: "=&r" (dummy), "=&r" (src0), "=&r" (len)
: "0" (dst0), "1" (src0), "2" (len)
: "memory", "lr", "r3", "r4", "r5", "cc");
/* dst0 was never modified (only the register copy %0 advanced), so
   returning it satisfies the memcpy contract.  */
return dst0;
}
#endif
|