author     Sunil K Pandey <skpgkp2@gmail.com>          2022-03-07 10:47:14 -0800
committer  Sunil K Pandey <skpgkp2@gmail.com>          2022-03-07 21:44:09 -0800
commit     26b648892a4ff2ee2c3f8cdb511d6550c8396731 (patch)
tree       9154bc2c7eac684c1b47146c27cacf695c43ef23
parent     2d4d1dc9ab1d1fd2bdf7fe13f8f2552a02ce2a5d (diff)
x86_64: Fix svml_d_log24_core_avx2.S code formatting
This commit contains the following formatting changes:

1. Instructions are preceded by a tab.
2. Instructions shorter than 8 characters have a tab between the mnemonic and the first operand.
3. Instructions of 8 or more characters have a space between the mnemonic and the first operand.
4. Tabs after `#define`d names and their values.
5. 8 spaces at the beginning of a line replaced by a tab.
6. Comments indented with the code.
7. Redundant .text section removed.
8. One space between line content and a trailing line comment.
9. Space after all commas.

Reviewed-by: Noah Goldstein <goldstein.w.n@gmail.com>
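For readers skimming the hunks below, the following minimal fragment illustrates what rules 1-4, 8, and 9 look like once applied. It is a hypothetical sketch, not an excerpt from the file: the fmt_example label, the EXAMPLE_OFFSET macro, and the operands are invented purely for illustration.

#define EXAMPLE_OFFSET	32	/* Tab after the #define'd name (rule 4).  */

	.text
	.globl	fmt_example
	.type	fmt_example, @function
fmt_example:
	/* Mnemonic shorter than 8 characters: tab between it and the
	   first operand (rules 1 and 2).  */
	movq	%rsp, %rax
	/* Mnemonic longer than 7 characters: a single space before the
	   first operand, with a space after every comma (rules 3 and 9).  */
	vfmadd213pd %ymm2, %ymm1, %ymm0
	ret /* One space before a trailing line comment (rule 8).  */
	.size	fmt_example, .-fmt_example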
-rw-r--r--  sysdeps/x86_64/fpu/multiarch/svml_d_log24_core_avx2.S  2483
1 file changed, 1241 insertions, 1242 deletions
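Orientation note for the hunks that follow: the in-code comments (preserve mantissa, reciprocal approximation, table lookup, argument reduction, polynomial, reconstruction) describe a table-driven log2. A rough sketch of that reduction, inferred from those comments rather than from any accompanying documentation, with k the unbiased exponent of the input:

\[
x = 2^{k} m,\quad 1 \le m < 2, \qquad r \approx \frac{1}{m}\ \text{(rounded to } 1{+}9 \text{ mantissa bits)}, \qquad d = m r - 1,
\]
\[
\log_2 x = k - \log_2 r + \log_2(1 + d) \approx k + T[r] + d\,P(d),
\]

where T[r] ≈ -log2(r) comes from the table lookup addressed through Table_Lookup_Bias and P is the low-degree polynomial stored at poly_coeff.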
diff --git a/sysdeps/x86_64/fpu/multiarch/svml_d_log24_core_avx2.S b/sysdeps/x86_64/fpu/multiarch/svml_d_log24_core_avx2.S
index 6b24449edf..25d2edaae5 100644
--- a/sysdeps/x86_64/fpu/multiarch/svml_d_log24_core_avx2.S
+++ b/sysdeps/x86_64/fpu/multiarch/svml_d_log24_core_avx2.S
@@ -29,1296 +29,1295 @@
/* Offsets for data table __svml_dlog2_data_internal
*/
-#define Log_HA_table 0
-#define Log_LA_table 8224
-#define poly_coeff 12352
-#define ExpMask 12512
-#define Two10 12544
-#define MinNorm 12576
-#define MaxNorm 12608
-#define HalfMask 12640
-#define One 12672
-#define Threshold 12704
-#define Bias 12736
-#define Bias1 12768
+#define Log_HA_table 0
+#define Log_LA_table 8224
+#define poly_coeff 12352
+#define ExpMask 12512
+#define Two10 12544
+#define MinNorm 12576
+#define MaxNorm 12608
+#define HalfMask 12640
+#define One 12672
+#define Threshold 12704
+#define Bias 12736
+#define Bias1 12768
/* Lookup bias for data table __svml_dlog2_data_internal. */
-#define Table_Lookup_Bias -0x405fe0
+#define Table_Lookup_Bias -0x405fe0
#include <sysdep.h>
- .text
- .section .text.avx2,"ax",@progbits
+ .section .text.avx2, "ax", @progbits
ENTRY(_ZGVdN4v_log2_avx2)
- pushq %rbp
- cfi_def_cfa_offset(16)
- movq %rsp, %rbp
- cfi_def_cfa(6, 16)
- cfi_offset(6, -16)
- andq $-32, %rsp
- subq $96, %rsp
- lea Table_Lookup_Bias+__svml_dlog2_data_internal(%rip), %r8
- vmovapd %ymm0, %ymm3
+ pushq %rbp
+ cfi_def_cfa_offset(16)
+ movq %rsp, %rbp
+ cfi_def_cfa(6, 16)
+ cfi_offset(6, -16)
+ andq $-32, %rsp
+ subq $96, %rsp
+ lea Table_Lookup_Bias+__svml_dlog2_data_internal(%rip), %r8
+ vmovapd %ymm0, %ymm3
-/* preserve mantissa, set input exponent to 2^(-10) */
- vandpd ExpMask+__svml_dlog2_data_internal(%rip), %ymm3, %ymm4
- vorpd Two10+__svml_dlog2_data_internal(%rip), %ymm4, %ymm2
+ /* preserve mantissa, set input exponent to 2^(-10) */
+ vandpd ExpMask+__svml_dlog2_data_internal(%rip), %ymm3, %ymm4
+ vorpd Two10+__svml_dlog2_data_internal(%rip), %ymm4, %ymm2
-/* reciprocal approximation good to at least 11 bits */
- vcvtpd2ps %ymm2, %xmm5
+ /* reciprocal approximation good to at least 11 bits */
+ vcvtpd2ps %ymm2, %xmm5
-/* exponent bits */
- vpsrlq $20, %ymm3, %ymm7
- vmovupd One+__svml_dlog2_data_internal(%rip), %ymm14
- vrcpps %xmm5, %xmm6
+ /* exponent bits */
+ vpsrlq $20, %ymm3, %ymm7
+ vmovupd One+__svml_dlog2_data_internal(%rip), %ymm14
+ vrcpps %xmm5, %xmm6
-/* check range */
- vcmplt_oqpd MinNorm+__svml_dlog2_data_internal(%rip), %ymm3, %ymm11
- vcmpnle_uqpd MaxNorm+__svml_dlog2_data_internal(%rip), %ymm3, %ymm12
- vcvtps2pd %xmm6, %ymm9
+ /* check range */
+ vcmplt_oqpd MinNorm+__svml_dlog2_data_internal(%rip), %ymm3, %ymm11
+ vcmpnle_uqpd MaxNorm+__svml_dlog2_data_internal(%rip), %ymm3, %ymm12
+ vcvtps2pd %xmm6, %ymm9
-/* round reciprocal to nearest integer, will have 1+9 mantissa bits */
- vroundpd $0, %ymm9, %ymm1
+ /* round reciprocal to nearest integer, will have 1+9 mantissa bits */
+ vroundpd $0, %ymm9, %ymm1
-/* exponent */
- vmovupd Threshold+__svml_dlog2_data_internal(%rip), %ymm9
+ /* exponent */
+ vmovupd Threshold+__svml_dlog2_data_internal(%rip), %ymm9
-/*
- * prepare table index
- * table lookup
- */
- vpsrlq $40, %ymm1, %ymm15
+ /*
+ * prepare table index
+ * table lookup
+ */
+ vpsrlq $40, %ymm1, %ymm15
-/* argument reduction */
- vfmsub213pd %ymm14, %ymm1, %ymm2
+ /* argument reduction */
+ vfmsub213pd %ymm14, %ymm1, %ymm2
-/* polynomial */
- vmovupd poly_coeff+__svml_dlog2_data_internal(%rip), %ymm14
- vcmplt_oqpd %ymm1, %ymm9, %ymm1
- vfmadd213pd poly_coeff+32+__svml_dlog2_data_internal(%rip), %ymm2, %ymm14
- vorpd %ymm12, %ymm11, %ymm13
- vmulpd %ymm2, %ymm2, %ymm12
+ /* polynomial */
+ vmovupd poly_coeff+__svml_dlog2_data_internal(%rip), %ymm14
+ vcmplt_oqpd %ymm1, %ymm9, %ymm1
+ vfmadd213pd poly_coeff+32+__svml_dlog2_data_internal(%rip), %ymm2, %ymm14
+ vorpd %ymm12, %ymm11, %ymm13
+ vmulpd %ymm2, %ymm2, %ymm12
-/* combine and get argument value range mask */
- vmovmskpd %ymm13, %eax
- vextractf128 $1, %ymm7, %xmm8
- vshufps $221, %xmm8, %xmm7, %xmm10
+ /* combine and get argument value range mask */
+ vmovmskpd %ymm13, %eax
+ vextractf128 $1, %ymm7, %xmm8
+ vshufps $221, %xmm8, %xmm7, %xmm10
-/* biased exponent in DP format */
- vcvtdq2pd %xmm10, %ymm0
- vandpd Bias+__svml_dlog2_data_internal(%rip), %ymm1, %ymm10
- vorpd Bias1+__svml_dlog2_data_internal(%rip), %ymm10, %ymm11
- vsubpd %ymm11, %ymm0, %ymm1
- vmovupd poly_coeff+64+__svml_dlog2_data_internal(%rip), %ymm0
- vfmadd213pd poly_coeff+96+__svml_dlog2_data_internal(%rip), %ymm2, %ymm0
- vmulpd poly_coeff+128+__svml_dlog2_data_internal(%rip), %ymm2, %ymm2
- vfmadd213pd %ymm0, %ymm12, %ymm14
- vfmadd213pd %ymm2, %ymm12, %ymm14
- vextractf128 $1, %ymm15, %xmm6
- vmovd %xmm15, %edx
- vmovd %xmm6, %esi
- movslq %edx, %rdx
- vpextrd $2, %xmm15, %ecx
- movslq %esi, %rsi
- vpextrd $2, %xmm6, %edi
- movslq %ecx, %rcx
- movslq %edi, %rdi
- vmovsd (%r8,%rdx), %xmm4
- vmovsd (%r8,%rsi), %xmm7
- vmovhpd (%r8,%rcx), %xmm4, %xmm5
- vmovhpd (%r8,%rdi), %xmm7, %xmm8
- vinsertf128 $1, %xmm8, %ymm5, %ymm13
+ /* biased exponent in DP format */
+ vcvtdq2pd %xmm10, %ymm0
+ vandpd Bias+__svml_dlog2_data_internal(%rip), %ymm1, %ymm10
+ vorpd Bias1+__svml_dlog2_data_internal(%rip), %ymm10, %ymm11
+ vsubpd %ymm11, %ymm0, %ymm1
+ vmovupd poly_coeff+64+__svml_dlog2_data_internal(%rip), %ymm0
+ vfmadd213pd poly_coeff+96+__svml_dlog2_data_internal(%rip), %ymm2, %ymm0
+ vmulpd poly_coeff+128+__svml_dlog2_data_internal(%rip), %ymm2, %ymm2
+ vfmadd213pd %ymm0, %ymm12, %ymm14
+ vfmadd213pd %ymm2, %ymm12, %ymm14
+ vextractf128 $1, %ymm15, %xmm6
+ vmovd %xmm15, %edx
+ vmovd %xmm6, %esi
+ movslq %edx, %rdx
+ vpextrd $2, %xmm15, %ecx
+ movslq %esi, %rsi
+ vpextrd $2, %xmm6, %edi
+ movslq %ecx, %rcx
+ movslq %edi, %rdi
+ vmovsd (%r8, %rdx), %xmm4
+ vmovsd (%r8, %rsi), %xmm7
+ vmovhpd (%r8, %rcx), %xmm4, %xmm5
+ vmovhpd (%r8, %rdi), %xmm7, %xmm8
+ vinsertf128 $1, %xmm8, %ymm5, %ymm13
-/* reconstruction */
- vaddpd %ymm14, %ymm13, %ymm0
- vaddpd %ymm0, %ymm1, %ymm0
- testl %eax, %eax
+ /* reconstruction */
+ vaddpd %ymm14, %ymm13, %ymm0
+ vaddpd %ymm0, %ymm1, %ymm0
+ testl %eax, %eax
-/* Go to special inputs processing branch */
- jne L(SPECIAL_VALUES_BRANCH)
- # LOE rbx r12 r13 r14 r15 eax ymm0 ymm3
+ /* Go to special inputs processing branch */
+ jne L(SPECIAL_VALUES_BRANCH)
+ # LOE rbx r12 r13 r14 r15 eax ymm0 ymm3
-/* Restore registers
- * and exit the function
- */
+ /* Restore registers
+ * and exit the function
+ */
L(EXIT):
- movq %rbp, %rsp
- popq %rbp
- cfi_def_cfa(7, 8)
- cfi_restore(6)
- ret
- cfi_def_cfa(6, 16)
- cfi_offset(6, -16)
+ movq %rbp, %rsp
+ popq %rbp
+ cfi_def_cfa(7, 8)
+ cfi_restore(6)
+ ret
+ cfi_def_cfa(6, 16)
+ cfi_offset(6, -16)
-/* Branch to process
- * special inputs
- */
+ /* Branch to process
+ * special inputs
+ */
L(SPECIAL_VALUES_BRANCH):
- vmovupd %ymm3, 32(%rsp)
- vmovupd %ymm0, 64(%rsp)
- # LOE rbx r12 r13 r14 r15 eax ymm0
+ vmovupd %ymm3, 32(%rsp)
+ vmovupd %ymm0, 64(%rsp)
+ # LOE rbx r12 r13 r14 r15 eax ymm0
- xorl %edx, %edx
- # LOE rbx r12 r13 r14 r15 eax edx
+ xorl %edx, %edx
+ # LOE rbx r12 r13 r14 r15 eax edx
- vzeroupper
- movq %r12, 16(%rsp)
- /* DW_CFA_expression: r12 (r12) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -80; DW_OP_plus) */
- .cfi_escape 0x10, 0x0c, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xb0, 0xff, 0xff, 0xff, 0x22
- movl %edx, %r12d
- movq %r13, 8(%rsp)
- /* DW_CFA_expression: r13 (r13) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -88; DW_OP_plus) */
- .cfi_escape 0x10, 0x0d, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa8, 0xff, 0xff, 0xff, 0x22
- movl %eax, %r13d
- movq %r14, (%rsp)
- /* DW_CFA_expression: r14 (r14) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -96; DW_OP_plus) */
- .cfi_escape 0x10, 0x0e, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa0, 0xff, 0xff, 0xff, 0x22
- # LOE rbx r15 r12d r13d
+ vzeroupper
+ movq %r12, 16(%rsp)
+ /* DW_CFA_expression: r12 (r12) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -80; DW_OP_plus) */
+ .cfi_escape 0x10, 0x0c, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xb0, 0xff, 0xff, 0xff, 0x22
+ movl %edx, %r12d
+ movq %r13, 8(%rsp)
+ /* DW_CFA_expression: r13 (r13) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -88; DW_OP_plus) */
+ .cfi_escape 0x10, 0x0d, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa8, 0xff, 0xff, 0xff, 0x22
+ movl %eax, %r13d
+ movq %r14, (%rsp)
+ /* DW_CFA_expression: r14 (r14) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -96; DW_OP_plus) */
+ .cfi_escape 0x10, 0x0e, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa0, 0xff, 0xff, 0xff, 0x22
+ # LOE rbx r15 r12d r13d
-/* Range mask
- * bits check
- */
+ /* Range mask
+ * bits check
+ */
L(RANGEMASK_CHECK):
- btl %r12d, %r13d
+ btl %r12d, %r13d
-/* Call scalar math function */
- jc L(SCALAR_MATH_CALL)
- # LOE rbx r15 r12d r13d
+ /* Call scalar math function */
+ jc L(SCALAR_MATH_CALL)
+ # LOE rbx r15 r12d r13d
-/* Special inputs
- * processing loop
- */
+ /* Special inputs
+ * processing loop
+ */
L(SPECIAL_VALUES_LOOP):
- incl %r12d
- cmpl $4, %r12d
+ incl %r12d
+ cmpl $4, %r12d
-/* Check bits in range mask */
- jl L(RANGEMASK_CHECK)
- # LOE rbx r15 r12d r13d
+ /* Check bits in range mask */
+ jl L(RANGEMASK_CHECK)
+ # LOE rbx r15 r12d r13d
- movq 16(%rsp), %r12
- cfi_restore(12)
- movq 8(%rsp), %r13
- cfi_restore(13)
- movq (%rsp), %r14
- cfi_restore(14)
- vmovupd 64(%rsp), %ymm0
+ movq 16(%rsp), %r12
+ cfi_restore(12)
+ movq 8(%rsp), %r13
+ cfi_restore(13)
+ movq (%rsp), %r14
+ cfi_restore(14)
+ vmovupd 64(%rsp), %ymm0
-/* Go to exit */
- jmp L(EXIT)
- /* DW_CFA_expression: r12 (r12) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -80; DW_OP_plus) */
- .cfi_escape 0x10, 0x0c, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xb0, 0xff, 0xff, 0xff, 0x22
- /* DW_CFA_expression: r13 (r13) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -88; DW_OP_plus) */
- .cfi_escape 0x10, 0x0d, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa8, 0xff, 0xff, 0xff, 0x22
- /* DW_CFA_expression: r14 (r14) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -96; DW_OP_plus) */
- .cfi_escape 0x10, 0x0e, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa0, 0xff, 0xff, 0xff, 0x22
- # LOE rbx r12 r13 r14 r15 ymm0
+ /* Go to exit */
+ jmp L(EXIT)
+ /* DW_CFA_expression: r12 (r12) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -80; DW_OP_plus) */
+ .cfi_escape 0x10, 0x0c, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xb0, 0xff, 0xff, 0xff, 0x22
+ /* DW_CFA_expression: r13 (r13) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -88; DW_OP_plus) */
+ .cfi_escape 0x10, 0x0d, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa8, 0xff, 0xff, 0xff, 0x22
+ /* DW_CFA_expression: r14 (r14) (DW_OP_lit8; DW_OP_minus; DW_OP_const4s: -32; DW_OP_and; DW_OP_const4s: -96; DW_OP_plus) */
+ .cfi_escape 0x10, 0x0e, 0x0e, 0x38, 0x1c, 0x0d, 0xe0, 0xff, 0xff, 0xff, 0x1a, 0x0d, 0xa0, 0xff, 0xff, 0xff, 0x22
+ # LOE rbx r12 r13 r14 r15 ymm0
-/* Scalar math fucntion call
- * to process special input
- */
+ /* Scalar math fucntion call
+ * to process special input
+ */
L(SCALAR_MATH_CALL):
- movl %r12d, %r14d
- movsd 32(%rsp,%r14,8), %xmm0
- call log2@PLT
- # LOE rbx r14 r15 r12d r13d xmm0
+ movl %r12d, %r14d
+ movsd 32(%rsp, %r14, 8), %xmm0
+ call log2@PLT
+ # LOE rbx r14 r15 r12d r13d xmm0
- movsd %xmm0, 64(%rsp,%r14,8)
+ movsd %xmm0, 64(%rsp, %r14, 8)
-/* Process special inputs in loop */
- jmp L(SPECIAL_VALUES_LOOP)
- # LOE rbx r15 r12d r13d
+ /* Process special inputs in loop */
+ jmp L(SPECIAL_VALUES_LOOP)
+ # LOE rbx r15 r12d r13d
END(_ZGVdN4v_log2_avx2)
- .section .rodata, "a"
- .align 32
+ .section .rodata, "a"
+ .align 32
#ifdef __svml_dlog2_data_internal_typedef
typedef unsigned int VUINT32;
typedef struct {
- __declspec(align(32)) VUINT32 Log_HA_table[(1<<10)+2][2];
- __declspec(align(32)) VUINT32 Log_LA_table[(1<<9)+1][2];
- __declspec(align(32)) VUINT32 poly_coeff[5][4][2];
- __declspec(align(32)) VUINT32 ExpMask[4][2];
- __declspec(align(32)) VUINT32 Two10[4][2];
- __declspec(align(32)) VUINT32 MinNorm[4][2];
- __declspec(align(32)) VUINT32 MaxNorm[4][2];
- __declspec(align(32)) VUINT32 HalfMask[4][2];
- __declspec(align(32)) VUINT32 One[4][2];
- __declspec(align(32)) VUINT32 Threshold[4][2];
- __declspec(align(32)) VUINT32 Bias[4][2];
- __declspec(align(32)) VUINT32 Bias1[4][2];
+ __declspec(align(32)) VUINT32 Log_HA_table[(1<<10)+2][2];
+ __declspec(align(32)) VUINT32 Log_LA_table[(1<<9)+1][2];
+ __declspec(align(32)) VUINT32 poly_coeff[5][4][2];
+ __declspec(align(32)) VUINT32 ExpMask[4][2];
+ __declspec(align(32)) VUINT32 Two10[4][2];
+ __declspec(align(32)) VUINT32 MinNorm[4][2];
+ __declspec(align(32)) VUINT32 MaxNorm[4][2];
+ __declspec(align(32)) VUINT32 HalfMask[4][2];
+ __declspec(align(32)) VUINT32 One[4][2];
+ __declspec(align(32)) VUINT32 Threshold[4][2];
+ __declspec(align(32)) VUINT32 Bias[4][2];
+ __declspec(align(32)) VUINT32 Bias1[4][2];
} __svml_dlog2_data_internal;
#endif
__svml_dlog2_data_internal:
- /* Log_HA_table */
- .quad 0xc08ff00000000000, 0x0000000000000000
- .quad 0xc08ff0040038c920, 0x3d52bfc81744e999
- .quad 0xc08ff007ff0f0190, 0xbd59b2cedc63c895
- .quad 0xc08ff00bfc839e88, 0xbd28e365e6741d71
- .quad 0xc08ff00ff8979428, 0x3d4027998f69a77d
- .quad 0xc08ff013f34bd5a0, 0x3d5dd2cb33fe6a89
- .quad 0xc08ff017eca15518, 0xbd526514cdf2c019
- .quad 0xc08ff01be49903d8, 0xbd44bfeeba165e04
- .quad 0xc08ff01fdb33d218, 0xbd3fa79ee110cec3
- .quad 0xc08ff023d072af20, 0xbd4eebb642c7fd60
- .quad 0xc08ff027c4568948, 0x3d429b13d7093443
- .quad 0xc08ff02bb6e04de8, 0x3d50f346bd36551e
- .quad 0xc08ff02fa810e968, 0xbd5020bb662f1536
- .quad 0xc08ff03397e94750, 0x3d5de76b56340995
- .quad 0xc08ff037866a5218, 0x3d58065ff3304090
- .quad 0xc08ff03b7394f360, 0x3d561fc9322fb785
- .quad 0xc08ff03f5f6a13d0, 0x3d0abecd17d0d778
- .quad 0xc08ff04349ea9b28, 0xbd588f3ad0ce4d44
- .quad 0xc08ff04733177040, 0xbd4454ba4ac5f44d
- .quad 0xc08ff04b1af178f8, 0xbd556f78faaa0887
- .quad 0xc08ff04f01799a58, 0x3d49db8976de7469
- .quad 0xc08ff052e6b0b868, 0xbd5cdb6fce17ef00
- .quad 0xc08ff056ca97b668, 0xbd576de8c0412f09
- .quad 0xc08ff05aad2f76a0, 0x3d30142c7ec6475c
- .quad 0xc08ff05e8e78da70, 0xbd1e685afc26de72
- .quad 0xc08ff0626e74c260, 0xbd40b64c954078a3
- .quad 0xc08ff0664d240e10, 0xbd5fcde393462d7d
- .quad 0xc08ff06a2a879c48, 0xbd537245eeeecc53
- .quad 0xc08ff06e06a04ae8, 0x3d4ac306eb47b436
- .quad 0xc08ff071e16ef6e8, 0xbd5a1fd9d3758f6b
- .quad 0xc08ff075baf47c80, 0x3d2401fbaaa67e3c
- .quad 0xc08ff0799331b6f0, 0x3d4f8dbef47a4d53
- .quad 0xc08ff07d6a2780a8, 0x3d51215e0abb42d1
- .quad 0xc08ff0813fd6b340, 0x3d57ce6249eddb35
- .quad 0xc08ff08514402770, 0xbd38a803c7083a25
- .quad 0xc08ff088e764b528, 0x3d42218beba5073e
- .quad 0xc08ff08cb9453370, 0x3d447b66f1c6248f
- .quad 0xc08ff09089e27880, 0xbd53d9297847e995
- .quad 0xc08ff094593d59c8, 0xbd12b6979cc77aa9
- .quad 0xc08ff0982756abd0, 0xbd55308545ecd702
- .quad 0xc08ff09bf42f4260, 0xbd578fa97c3b936f
- .quad 0xc08ff09fbfc7f068, 0xbd41828408ce869d
- .quad 0xc08ff0a38a218808, 0x3d555da6ce7251a6
- .quad 0xc08ff0a7533cda88, 0xbd41f3cd14bfcb02
- .quad 0xc08ff0ab1b1ab878, 0xbd1f028da6bf1852
- .quad 0xc08ff0aee1bbf188, 0xbd4cf04de3267f54
- .quad 0xc08ff0b2a72154a8, 0xbd4556e47019db10
- .quad 0xc08ff0b66b4baff8, 0x3d1e7ba00b15fbe4
- .quad 0xc08ff0ba2e3bd0d0, 0x3d5bfde1c52c2f28
- .quad 0xc08ff0bdeff283b8, 0x3d48d63fe20ee5d6
- .quad 0xc08ff0c1b0709480, 0x3d57f551980838ff
- .quad 0xc08ff0c56fb6ce20, 0xbd4189091f293c81
- .quad 0xc08ff0c92dc5fae0, 0x3d4d549f05f06169
- .quad 0xc08ff0ccea9ee428, 0xbd5982466074e1e3
- .quad 0xc08ff0d0a64252b8, 0xbd5d30a6b16c0e4b
- .quad 0xc08ff0d460b10e80, 0xbd3138bf3b51a201
- .quad 0xc08ff0d819ebdea8, 0xbd454e680c0801d6
- .quad 0xc08ff0dbd1f389a8, 0x3d584db361385926
- .quad 0xc08ff0df88c8d520, 0xbd564f2252a82c03
- .quad 0xc08ff0e33e6c8610, 0xbd5c78c35ed5d034
- .quad 0xc08ff0e6f2df60a8, 0xbd52eb9f29ca3d75
- .quad 0xc08ff0eaa6222860, 0x3d5340c0c01b5ff8
- .quad 0xc08ff0ee58359fe8, 0x3d10c2acaffa64b6
- .quad 0xc08ff0f2091a8948, 0xbd3fced311301ebe
- .quad 0xc08ff0f5b8d1a5c8, 0x3d41ee5d591af30b
- .quad 0xc08ff0f9675bb5f0, 0x3d4873546b0e668c
- .quad 0xc08ff0fd14b97998, 0x3d5a99928177a119
- .quad 0xc08ff100c0ebafd8, 0x3d378ead132adcac
- .quad 0xc08ff1046bf31720, 0x3d51a538bc597d48
- .quad 0xc08ff10815d06d18, 0xbd540ee2f35efd7e
- .quad 0xc08ff10bbe846ec8, 0xbd59cf94753adacc
- .quad 0xc08ff10f660fd878, 0xbd5201a3d6862895
- .quad 0xc08ff1130c7365c0, 0x3d383e25d0822d03
- .quad 0xc08ff116b1afd180, 0xbd0b7389bbea8f7b
- .quad 0xc08ff11a55c5d5f0, 0xbd4df278087a6617
- .quad 0xc08ff11df8b62c98, 0xbd48daeb8ec01e26
- .quad 0xc08ff1219a818e50, 0x3d57c9312e0a14da
- .quad 0xc08ff1253b28b330, 0xbd5f0fbc0e4d507e
- .quad 0xc08ff128daac52c8, 0xbd222afdee008687
- .quad 0xc08ff12c790d23d8, 0x3d17c71747bcef8b
- .quad 0xc08ff130164bdc88, 0x3d5d69cfd051af50
- .quad 0xc08ff133b2693248, 0x3d59dff064e9433a
- .quad 0xc08ff1374d65d9e8, 0x3d4f71a30db3240b
- .quad 0xc08ff13ae7428788, 0xbd5e56afa9524606
- .quad 0xc08ff13e7fffeeb0, 0xbd44acd84e6f8518
- .quad 0xc08ff142179ec228, 0xbd519845ade5e121
- .quad 0xc08ff145ae1fb420, 0xbd5b3b4a38ddec70
- .quad 0xc08ff14943837620, 0xbd5ea4bb5bc137c7
- .quad 0xc08ff14cd7cab910, 0x3d5610f3bf8eb6ce
- .quad 0xc08ff1506af62d20, 0x3d57b1170d6184cf
- .quad 0xc08ff153fd0681f0, 0x3d5791a688a3660e
- .quad 0xc08ff1578dfc6678, 0x3d5d41ecf8abac2e
- .quad 0xc08ff15b1dd88908, 0x3cf0bd995d64d573
- .quad 0xc08ff15eac9b9758, 0xbd5e3653cd796d01
- .quad 0xc08ff1623a463e80, 0xbd597573005ef2d8
- .quad 0xc08ff165c6d92af0, 0xbd4ee222d6439c41
- .quad 0xc08ff16952550880, 0x3d5913b845e75950
- .quad 0xc08ff16cdcba8258, 0xbd558e7ba239077e
- .quad 0xc08ff170660a4328, 0x3d5a0e174a2cae66
- .quad 0xc08ff173ee44f4d8, 0x3d22b8db103db712
- .quad 0xc08ff177756b40d8, 0x3d5cc610480853c4
- .quad 0xc08ff17afb7dcfe0, 0xbd304a8bc84e5c0f
- .quad 0xc08ff17e807d4a28, 0x3d3639d185da5f7d
- .quad 0xc08ff182046a5738, 0xbd534705d06d788f
- .quad 0xc08ff18587459e10, 0xbd540d25b28a51fd
- .quad 0xc08ff189090fc510, 0xbd02d804afa7080a
- .quad 0xc08ff18c89c97200, 0x3d5f2a5d305818ba
- .quad 0xc08ff19009734a08, 0xbd3a602e9d05c3e4
- .quad 0xc08ff193880df1d0, 0xbd533d6fdcd54875
- .quad 0xc08ff197059a0d60, 0x3d24eaf0a9490202
- .quad 0xc08ff19a82184020, 0xbd5685666d98eb59
- .quad 0xc08ff19dfd892cf8, 0xbd509f8745f0868b
- .quad 0xc08ff1a177ed7630, 0xbd2dcba340a9d268
- .quad 0xc08ff1a4f145bd80, 0x3d4916fcd0331266
- .quad 0xc08ff1a86992a408, 0xbd548cd033a49073
- .quad 0xc08ff1abe0d4ca68, 0xbd5252f40e5df1a2
- .quad 0xc08ff1af570cd0a0, 0xbd541d623bd02248
- .quad 0xc08ff1b2cc3b5628, 0xbd258dc48235c071
- .quad 0xc08ff1b64060f9e0, 0xbd4b4bd8f02ed3f2
- .quad 0xc08ff1b9b37e5a28, 0x3d4e8d20a88cd0a2
- .quad 0xc08ff1bd259414c0, 0x3d3b669b6380bc55
- .quad 0xc08ff1c096a2c6e8, 0xbd45d54159d51094
- .quad 0xc08ff1c406ab0d58, 0x3d59f684ffbca44d
- .quad 0xc08ff1c775ad8428, 0x3d543b1b1d508399
- .quad 0xc08ff1cae3aac6f8, 0x3d5c30953a12fc6e
- .quad 0xc08ff1ce50a370d0, 0xbd1763b04f9aad5f
- .quad 0xc08ff1d1bc981c40, 0x3d573c6fa54f46c2
- .quad 0xc08ff1d527896338, 0x3d48ccfb9ffd7455
- .quad 0xc08ff1d89177df30, 0x3d42756f80d6f7ce
- .quad 0xc08ff1dbfa642910, 0xbd3c2bfbc353c5a5
- .quad 0xc08ff1df624ed940, 0x3d1d6064f5dc380b
- .quad 0xc08ff1e2c9388798, 0x3ce327c6b30711cf
- .quad 0xc08ff1e62f21cb70, 0x3d140aa9546525bc
- .quad 0xc08ff1e9940b3b98, 0xbd15c1ff43c21863
- .quad 0xc08ff1ecf7f56e60, 0x3d590ba680120498
- .quad 0xc08ff1f05ae0f988, 0x3d5390c6b62dff50
- .quad 0xc08ff1f3bcce7258, 0x3d4da0c90878457f
- .quad 0xc08ff1f71dbe6d90, 0x3d30697edc85b98c
- .quad 0xc08ff1fa7db17f70, 0x3d04d81188510a79
- .quad 0xc08ff1fddca83bb0, 0xbd5f2ddc983ce25c
- .quad 0xc08ff2013aa33598, 0x3d46c22f0fae6844
- .quad 0xc08ff20497a2ffd0, 0xbd53359b714c3d03
- .quad 0xc08ff207f3a82ca0, 0xbd4aefaa5524f88b
- .quad 0xc08ff20b4eb34dc0, 0x3d39bf4a4a73d01d
- .quad 0xc08ff20ea8c4f468, 0x3d44217befdb12e6
- .quad 0xc08ff21201ddb158, 0x3d5219b281d4b6f8
- .quad 0xc08ff21559fe14c8, 0xbd5e3b123373d370
- .quad 0xc08ff218b126ae88, 0xbd59b525a6edc3cb
- .quad 0xc08ff21c07580dd8, 0xbd4b494e7737c4dc
- .quad 0xc08ff21f5c92c180, 0xbd3989b7d67e3e54
- .quad 0xc08ff222b0d757d0, 0x3d486c8f098ad3cf
- .quad 0xc08ff22604265e98, 0x3d5254956d8e15b2
- .quad 0xc08ff22956806330, 0x3d3f14730a362959
- .quad 0xc08ff22ca7e5f278, 0xbd40e8ed02e32ea1
- .quad 0xc08ff22ff85798d8, 0xbd40fb2b9b1e0261
- .quad 0xc08ff23347d5e238, 0xbd5bfeb1e13c8bc3
- .quad 0xc08ff23696615a18, 0x3d5b891f041e037b
- .quad 0xc08ff239e3fa8b60, 0xbd36255027582bb9
- .quad 0xc08ff23d30a200a8, 0x3d56bb5a92a55361
- .quad 0xc08ff2407c5843f0, 0xbd31902fb4417244
- .quad 0xc08ff243c71dded8, 0xbd5a8a7c3c4a2cc6
- .quad 0xc08ff24710f35a88, 0xbd23be1be6941016
- .quad 0xc08ff24a59d93fa8, 0x3d55c85afafa1d46
- .quad 0xc08ff24da1d01668, 0xbd5b4b05a0adcbf1
- .quad 0xc08ff250e8d866a0, 0x3d134d191476f74b
- .quad 0xc08ff2542ef2b798, 0x3d5e78ce963395e1
- .quad 0xc08ff257741f9028, 0x3d3f9219a8f57c17
- .quad 0xc08ff25ab85f76c8, 0x3d5cfc6f47ac691b
- .quad 0xc08ff25dfbb2f168, 0x3d4ab3b720b5ca71
- .quad 0xc08ff2613e1a8598, 0x3d54a4ab99feb71a
- .quad 0xc08ff2647f96b868, 0xbd42daa69d79d724
- .quad 0xc08ff267c0280e88, 0xbd344d9115018f45
- .quad 0xc08ff26affcf0c28, 0xbd56673e143d2ac0
- .quad 0xc08ff26e3e8c3518, 0x3d3aac889e91c638
- .quad 0xc08ff2717c600ca8, 0x3d4cf65b41d006e7
- .quad 0xc08ff274b94b15c0, 0xbd4c821320391e76
- .quad 0xc08ff277f54dd2e8, 0x3d51abd6e2ddc2a1
- .quad 0xc08ff27b3068c620, 0xbd2f1bdd1264e703
- .quad 0xc08ff27e6a9c7110, 0xbd58437b4f032f15
- .quad 0xc08ff281a3e954f0, 0xbd4f8e063b069a7d
- .quad 0xc08ff284dc4ff288, 0x3d5276d0723a662a
- .quad 0xc08ff28813d0ca28, 0xbd5731f7c6d8f6eb
- .quad 0xc08ff28b4a6c5bd0, 0xbd58b587f08307ec
- .quad 0xc08ff28e80232708, 0x3d57f19a7a352baf
- .quad 0xc08ff291b4f5aae0, 0x3d570d99aff32790
- .quad 0xc08ff294e8e46610, 0x3d4efafaad4f59db
- .quad 0xc08ff2981befd6e0, 0xbd41eb1728371564
- .quad 0xc08ff29b4e187b38, 0x3d458465b4e080d7
- .quad 0xc08ff29e7f5ed088, 0x3d46acb4a035a820
- .quad 0xc08ff2a1afc353e0, 0xbd39fc68238dd5d3