path: root/arch/arc/lib/strchr-700.S
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

/* ARC700 has a relatively long pipeline and branch prediction, so we want
   to avoid branches that are hard to predict.  On the other hand, the
   presence of the norm instruction makes it easier to operate on whole
   words branch-free.  */
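
/* For reference, the word tests used below are the usual C word-at-a-time
   byte-search trick.  A rough, self-contained C sketch of the aligned loop
   (illustrative only: strchr_sketch and its variable names are invented
   here and are not part of this file or of the kernel build):

	#include <stdint.h>

	static const char *strchr_sketch(const char *s, int c)
	{
		const uint32_t ones = 0x01010101u, tops = 0x80808080u;
		uint32_t ccc = (uint8_t)c * ones;         // c in every byte (r5)
		const uint32_t *p = (const uint32_t *)s;  // assume s word-aligned

		for (;;) {
			uint32_t w = *p++;                      // current word (r2)
			uint32_t nul = (w - ones) & ~w & tops;  // any '\0' byte?
			uint32_t chr = ((w ^ ccc) - ones) & ~(w ^ ccc) & tops;
			if (nul | chr) {
				// Resolve byte by byte; the assembly instead
				// uses norm on the 0x80 masks to avoid this.
				const char *q = (const char *)(p - 1);
				for (int i = 0; i < 4; i++, q++) {
					if (*q == (char)c)
						return q;
					if (*q == '\0')
						return 0;
				}
			}
		}
	}

   The assembly interleaves both tests so the loop stays branch-free until a
   hit, and it handles an unaligned start by shifting the 0x01010101 constant
   so that the bytes before the start of the string cannot produce a hit.  */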

#include <asm/linkage.h>

ARC_ENTRY strchr
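	; r0 = string (also holds the return value), r1 = char to look for.
	; In the main loop r3 = 0x01010101, r4 = 0x80808080 (r3 rotated right
	; by one) and r5 = r1 replicated into all four bytes; r2 holds the
	; current word and r6/r7/r12 are scratch.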
	extb_s	r1,r1
	asl	r5,r1,8
	bmsk	r2,r0,1
	or	r5,r5,r1
	mov_s	r3,0x01010101
	breq.d	r2,r0,.Laligned
	asl	r4,r5,16
	sub_s	r0,r0,r2
	asl	r7,r2,3
	ld_s	r2,[r0]
#ifdef __LITTLE_ENDIAN__
	asl	r7,r3,r7
#else
	lsr	r7,r3,r7
#endif
	or	r5,r5,r4
	ror	r4,r3
	sub	r12,r2,r7
	bic_s	r12,r12,r2
	and	r12,r12,r4
	brne.d	r12,0,.Lfound0_ua
	xor	r6,r2,r5
	ld.a	r2,[r0,4]
	sub	r12,r6,r7
	bic	r12,r12,r6
#ifdef __LITTLE_ENDIAN__
	and	r7,r12,r4
	breq	r7,0,.Loop ; For speed, we want this branch to be unaligned.
	b	.Lfound_char ; Likewise this one.
#else
	and	r12,r12,r4
	breq	r12,0,.Loop ; For speed, we want this branch to be unaligned.
	lsr_s	r12,r12,7
	bic 	r2,r7,r6
	b.d	.Lfound_char_b
	and_s	r2,r2,r12
#endif
; /* We require this code address to be unaligned for speed...  */
.Laligned:
	ld_s	r2,[r0]
	or	r5,r5,r4
	ror	r4,r3
; /* ... so that this code address is aligned, for itself and ...  */
.Loop:
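	; (w - 0x01010101) & ~w & 0x80808080 is nonzero iff the word w in r2
	; contains a zero byte; the same test applied to w ^ r5 (kept in r6)
	; flags the bytes that equal the searched-for character.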
	sub	r12,r2,r3
	bic_s	r12,r12,r2
	and	r12,r12,r4
	brne.d	r12,0,.Lfound0
	xor	r6,r2,r5
	ld.a	r2,[r0,4]
	sub	r12,r6,r3
	bic	r12,r12,r6
	and	r7,r12,r4
	breq	r7,0,.Loop /* ... so that this branch is unaligned.  */
	; Found searched-for character.  r0 has already advanced to next word.
#ifdef __LITTLE_ENDIAN__
/* We only need the information about the first matching byte
   (i.e. the least significant matching byte) to be exact,
   hence there is no problem with carry effects.  */
.Lfound_char:
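	; r7 flags matching bytes with 0x80.  Isolate the bits below the first
	; (least significant) match, let norm turn them into a shift count,
	; and step r0 back from the already-advanced pointer to that byte.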
	sub	r3,r7,1
	bic	r3,r3,r7
	norm	r2,r3
	sub_s	r0,r0,1
	asr_s	r2,r2,3
	j.d	[blink]
	sub_s	r0,r0,r2

	.balign	4
.Lfound0_ua:
	mov	r3,r7
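	; A zero byte was seen in the current word.  Build the character-match
	; mask for the same word, find which of the two comes first, and
	; return NULL (mov.pl below) if the terminator precedes the character.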
.Lfound0:
	sub	r3,r6,r3
	bic	r3,r3,r6
	and	r2,r3,r4
	or_s	r12,r12,r2
	sub_s	r3,r12,1
	bic_s	r3,r3,r12
	norm	r3,r3
	add_s	r0,r0,3
	asr_s	r12,r3,3
	asl.f	0,r2,r3
	sub_s	r0,r0,r12
	j_s.d	[blink]
	mov.pl	r0,0
#else /* BIG ENDIAN */
.Lfound_char:
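	; Move the 0x80 match flags down to bit 0 of each byte and use
	; ~(w ^ r5) to drop the false positives that borrows can create, so
	; norm picks out the most significant, i.e. first, matching byte.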
	lsr	r7,r7,7

	bic	r2,r7,r6
.Lfound_char_b:
	norm	r2,r2
	sub_s	r0,r0,4
	asr_s	r2,r2,3
	j.d	[blink]
	add_s	r0,r0,r2

.Lfound0_ua:
	mov_s	r3,r7
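	; A zero byte was seen.  As in the little-endian case, decide whether
	; the character or the terminator comes first (on big-endian the first
	; byte of the string is the most significant one) and return NULL if
	; the terminator wins.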
.Lfound0:
	asl_s	r2,r2,7
	or	r7,r6,r4
	bic_s	r12,r12,r2
	sub	r2,r7,r3
	or	r2,r2,r6
	bic	r12,r2,r12
	bic.f	r3,r4,r12
	norm	r3,r3

	add.pl	r3,r3,1
	asr_s	r12,r3,3
	asl.f	0,r2,r3
	add_s	r0,r0,r12
	j_s.d	[blink]
	mov.mi	r0,0
#endif /* ENDIAN */
ARC_EXIT strchr