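// mpn_nand_n for AArch64: rp[i] = ~(up[i] & vp[i]) for i = 0 .. n-1.
// Arguments (AAPCS64 / GMP mpn convention): x0 = rp, x1 = up, x2 = vp, x3 = n (n >= 1).
// The base A64 integer ISA has no NAND instruction, so each result limb is
// formed with AND followed by MVN.  The main loop is unrolled to four limbs
// per iteration and software pipelined: one operand pair is always loaded
// ahead of the pair being combined and stored.  The entry code dispatches on
// n mod 4 to peel the leftover limbs and set up that preload.
// Note: x18 holds the block count; x18 is the reserved platform register on
// some ABIs (e.g. Darwin, Windows), so this code assumes a target where it
// is free for use as a scratch register.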
.text
.align 3
.globl __gmpn_nand_n
.type __gmpn_nand_n,@function
__gmpn_nand_n:
lsr x18, x3, #2      // x18 = n >> 2: main loop iteration count
tbz x3, #0, .Lbx0    // branch if n is even
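// n is odd: compute rp[0] = ~(up[0] & vp[0]) on its own, then test bit 1
// of n to see what remains modulo 4.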
.Lbx1: ldr x7, [x1]
ldr x11, [x2]
and x15, x7, x11
mvn x15, x15
str x15, [x0],#8
tbnz x3, #1, .Lb11
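// n == 4k+1: the single limb was all there is if k == 0.  Otherwise preload
// limbs 1 and 2 into x4/x5 and x8/x9, and bias up/vp back by 8 bytes so the
// pre-indexed loads at .Lmid fetch limbs 3 and 4.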
.Lb01: cbz x18, .Lret
ldp x4, x5, [x1,#8]
ldp x8, x9, [x2,#8]
sub x1, x1, #8
sub x2, x2, #8
b .Lmid
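// n == 4k+3: preload limbs 1 and 2 into x6/x7 and x10/x11 and advance up/vp
// by 8 bytes; if k == 0 only that pair is left (.Lend), else enter the loop.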
.Lb11: ldp x6, x7, [x1,#8]
ldp x10, x11, [x2,#8]
add x1, x1, #8
add x2, x2, #8
cbz x18, .Lend
b .Ltop
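// n is even: check bit 1 of n.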
.Lbx0: tbnz x3, #1, .Lb10
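// n == 4k with k >= 1: preload limbs 0 and 1, and bias up/vp back by 16
// bytes so the pre-indexed loads at .Lmid fetch limbs 2 and 3.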
.Lb00: ldp x4, x5, [x1],#-16
ldp x8, x9, [x2],#-16
b .Lmid
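// n == 4k+2: preload limbs 0 and 1; if k == 0 that pair is all there is.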
.Lb10: ldp x6, x7, [x1]
ldp x10, x11, [x2]
cbz x18, .Lend
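// Main loop: four result limbs per iteration.  Each half loads the next
// operand pair while the previously loaded pair is combined and stored;
// x18 counts the remaining 4-limb blocks.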
.align 4
.Ltop: ldp x4, x5, [x1,#16]    // load next up pair
ldp x8, x9, [x2,#16]           // load next vp pair
and x12, x6, x10
and x13, x7, x11
mvn x12, x12                   // NAND of the pair loaded at .Lmid (or at entry)
mvn x13, x13
stp x12, x13, [x0],#16         // store two result limbs
.Lmid: ldp x6, x7, [x1,#32]!   // load next up pair, advancing up by 32
ldp x10, x11, [x2,#32]!        // load next vp pair, advancing vp by 32
and x12, x4, x8
and x13, x5, x9
mvn x12, x12                   // NAND of the pair loaded at .Ltop (or at entry)
mvn x13, x13
stp x12, x13, [x0],#16         // store two result limbs
sub x18, x18, #1
cbnz x18, .Ltop                // loop while 4-limb blocks remain
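// Drain the pipeline: the last preloaded pair is still in x6/x7 and x10/x11;
// the final store needs no pointer update.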
.Lend: and x12, x6, x10
and x13, x7, x11
mvn x12, x12
mvn x13, x13
stp x12, x13, [x0]
.Lret: ret
.size __gmpn_nand_n,.-__gmpn_nand_n