path: root/vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s
author    polwex <polwex@sortug.com>    2025-10-05 21:56:51 +0700
committer polwex <polwex@sortug.com>    2025-10-05 21:56:51 +0700
commit    fcedfddf00b3f994e4f4e40332ac7fc192c63244 (patch)
tree      51d38e62c7bdfcc5f9a5e9435fe820c93cfc9a3d /vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s
claude is gud
Diffstat (limited to 'vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s')
-rw-r--r--    vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s    207
1 file changed, 207 insertions, 0 deletions
diff --git a/vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s b/vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s
new file mode 100644
index 0000000..192e440
--- /dev/null
+++ b/vere/ext/gmp/gen/x86_64-windows/mpn/lshiftc.s
@@ -0,0 +1,207 @@
+
+ .text
+ .align 32, 0x90
+ .globl __gmpn_lshiftc
+
+ .def __gmpn_lshiftc
+ .scl 2
+ .type 32
+ .endef
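+ # mpn_lshiftc(rp, up, n, cnt): store the one's complement of {up,n} shifted
+ # left by cnt bits at {rp,n}; return the bits shifted out of the top limb.
+ # Assumes 1 <= cnt < 64, GMP's usual precondition for limb-shift routines.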
+__gmpn_lshiftc:
+
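+ # Win64 passes the arguments in rcx, rdx, r8, r9; move them into the
+ # registers the shift code below expects (rdi=rp, rsi=up, rdx=n, rcx=cnt).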
+ push %rdi
+ push %rsi
+ mov %rcx, %rdi
+ mov %rdx, %rsi
+ mov %r8, %rdx
+ mov %r9, %rcx
+
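+ # Return value: the cnt bits shifted out of the high limb,
+ # i.e. up[n-1] >> (64-cnt), built in %rax.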
+ neg %ecx
+ mov -8(%rsi,%rdx,8), %rax
+ shr %cl, %rax
+
+ neg %ecx
+ lea 1(%rdx), %r8d
+ and $3, %r8d
+ je Lrlx
+
+ dec %r8d
+ jne L1
+
+ mov -8(%rsi,%rdx,8), %r10
+ shl %cl, %r10
+ neg %ecx
+ mov -16(%rsi,%rdx,8), %r8
+ shr %cl, %r8
+ or %r8, %r10
+ not %r10
+ mov %r10, -8(%rdi,%rdx,8)
+ dec %rdx
+ jmp Lrll
+
+L1: dec %r8d
+ je L1x
+
+ mov -8(%rsi,%rdx,8), %r10
+ shl %cl, %r10
+ neg %ecx
+ mov -16(%rsi,%rdx,8), %r8
+ shr %cl, %r8
+ or %r8, %r10
+ not %r10
+ mov %r10, -8(%rdi,%rdx,8)
+ dec %rdx
+ neg %ecx
+L1x:
+ cmp $1, %rdx
+ je Last
+ mov -8(%rsi,%rdx,8), %r10
+ shl %cl, %r10
+ mov -16(%rsi,%rdx,8), %r11
+ shl %cl, %r11
+ neg %ecx
+ mov -16(%rsi,%rdx,8), %r8
+ mov -24(%rsi,%rdx,8), %r9
+ shr %cl, %r8
+ or %r8, %r10
+ shr %cl, %r9
+ or %r9, %r11
+ not %r10
+ not %r11
+ mov %r10, -8(%rdi,%rdx,8)
+ mov %r11, -16(%rdi,%rdx,8)
+ sub $2, %rdx
+
+Lrll: neg %ecx
+Lrlx: mov -8(%rsi,%rdx,8), %r10
+ shl %cl, %r10
+ mov -16(%rsi,%rdx,8), %r11
+ shl %cl, %r11
+
+ sub $4, %rdx
+ jb Lend
+ .align 16, 0x90
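+ # Main loop: four limbs per iteration, combining adjacent source limbs
+ # with shl/shr, complementing with not, and storing to rp.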
+Ltop:
+
+ neg %ecx
+ mov 16(%rsi,%rdx,8), %r8
+ mov 8(%rsi,%rdx,8), %r9
+ shr %cl, %r8
+ or %r8, %r10
+ shr %cl, %r9
+ or %r9, %r11
+ not %r10
+ not %r11
+ mov %r10, 24(%rdi,%rdx,8)
+ mov %r11, 16(%rdi,%rdx,8)
+
+ mov 0(%rsi,%rdx,8), %r8
+ mov -8(%rsi,%rdx,8), %r9
+ shr %cl, %r8
+ shr %cl, %r9
+
+
+ neg %ecx
+ mov 8(%rsi,%rdx,8), %r10
+ mov 0(%rsi,%rdx,8), %r11
+ shl %cl, %r10
+ or %r10, %r8
+ shl %cl, %r11
+ or %r11, %r9
+ not %r8
+ not %r9
+ mov %r8, 8(%rdi,%rdx,8)
+ mov %r9, 0(%rdi,%rdx,8)
+
+ mov -8(%rsi,%rdx,8), %r10
+ mov -16(%rsi,%rdx,8), %r11
+ shl %cl, %r10
+ shl %cl, %r11
+
+ sub $4, %rdx
+ jae Ltop
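+ # Wind-down: Lend writes the remaining two limbs; Last handles the lowest limb.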
+Lend:
+ neg %ecx
+ mov 8(%rsi), %r8
+ shr %cl, %r8
+ or %r8, %r10
+ mov (%rsi), %r9
+ shr %cl, %r9
+ or %r9, %r11
+ not %r10
+ not %r11
+ mov %r10, 16(%rdi)
+ mov %r11, 8(%rdi)
+
+ neg %ecx
+Last: mov (%rsi), %r10
+ shl %cl, %r10
+ not %r10
+ mov %r10, (%rdi)
+ pop %rsi
+ pop %rdi
+ ret
+
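For reference, the routine above implements GMP's mpn_lshiftc primitive: shift an n-limb operand left by cnt bits, store the one's complement of the result, and return the bits shifted out of the top limb. The following C sketch shows the same semantics for readability only; it is not GMP code, the name ref_lshiftc is hypothetical, and it assumes 64-bit limbs and 1 <= cnt <= 63.

#include <stdint.h>
#include <stddef.h>

typedef uint64_t limb_t;

/* rp[] = ~({up,n} << cnt); returns the (uncomplemented) bits shifted out. */
limb_t ref_lshiftc(limb_t *rp, const limb_t *up, size_t n, unsigned cnt)
{
    unsigned tnc = 64 - cnt;              /* 1 <= cnt <= 63, so 1 <= tnc <= 63 */
    limb_t high = up[n - 1];
    limb_t retval = high >> tnc;          /* bits shifted out of the top limb */

    /* Work from the most significant limb down, as the assembly does,
       so the routine also works in place (rp == up). */
    for (size_t i = n - 1; i > 0; i--) {
        limb_t low = up[i - 1];
        rp[i] = ~((high << cnt) | (low >> tnc));
        high = low;
    }
    rp[0] = ~(high << cnt);               /* lowest limb: no incoming bits */
    return retval;
}

The sketch is a readability aid only; the generated assembly unrolls the same per-limb recurrence four limbs at a time and adds the Win64-to-internal register shuffle in its prologue.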