From c7c7a6d0a688ac9e40cfc772e2c18db9c487823b Mon Sep 17 00:00:00 2001
From: "duanyi.aster"
Date: Fri, 22 Jul 2022 19:00:02 +0800
Subject: [PATCH] fix:(native) skip returns `ERR_EOF` instead of `ERR_INVAL`
 when the buffer runs out
---
 internal/native/avx/native_amd64.s        |  7857 +++++++-------
 internal/native/avx/native_subr_amd64.go  |    58 +-
 internal/native/avx2/native_amd64.s       | 10914 ++++++++++----------
 internal/native/avx2/native_subr_amd64.go |    50 +-
 issue_test/issue263_test.go               |    91 +
 native/scanning.c                         |    12 +-
 6 files changed, 9653 insertions(+), 9329 deletions(-)
 create mode 100644 issue_test/issue263_test.go

diff --git a/internal/native/avx/native_amd64.s b/internal/native/avx/native_amd64.s
index 04ff9ae17..97030f05d 100644
--- a/internal/native/avx/native_amd64.s
+++ b/internal/native/avx/native_amd64.s
@@ -15,75 +15,89 @@ _lzero:
 BYTE $0x55 // pushq %rbp
 WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp
 CMPQ SI, $32
- JB LBB0_3
+ JB LBB0_5
+ LEAQ -32(SI), AX
+ MOVQ AX, CX
+ ANDQ $-32, CX
+ LEAQ 32(CX)(DI*1), CX
+ ANDL $31, AX

-LBB0_1:
+LBB0_2:
 LONG $0x076ffec5 // vmovdqu (%rdi), %ymm0
 LONG $0x177de2c4; BYTE $0xc0 // vptest %ymm0, %ymm0
- JNE LBB0_9
+ JNE LBB0_13
 ADDQ $32, DI
 ADDQ $-32, SI
 CMPQ SI, $31
- JA LBB0_1
+ JA LBB0_2
+ MOVQ AX, SI
+ MOVQ CX, DI

-LBB0_3:
+LBB0_5:
 WORD $0xf8c5; BYTE $0x77 // vzeroupper
 CMPQ SI, $16
- JB LBB0_6
+ JB LBB0_10
+ LEAQ -16(SI), AX
+ MOVQ AX, CX
+ ANDQ $-16, CX
+ LEAQ 16(CX)(DI*1), CX
+ ANDL $15, AX

-LBB0_4:
+LBB0_7:
 LONG $0x076ffac5 // vmovdqu (%rdi), %xmm0
 LONG $0x1779e2c4; BYTE $0xc0 // vptest %xmm0, %xmm0
- JNE LBB0_10
+ JNE LBB0_14
 ADDQ $16, DI
 ADDQ $-16, SI
 CMPQ SI, $15
- JA LBB0_4
+ JA LBB0_7
+ MOVQ AX, SI
+ MOVQ CX, DI

-LBB0_6:
+LBB0_10:
 CMPQ SI, $8
- JB LBB0_12
+ JB LBB0_16
 MOVL $1, AX
 CMPQ 0(DI), $0
- JNE LBB0_8
+ JNE LBB0_12
 ADDQ $8, DI
 ADDQ $-8, SI

-LBB0_12:
+LBB0_16:
 CMPQ SI, $4
- JB LBB0_15
+ JB LBB0_19
 MOVL $1, AX
 CMPL 0(DI), $0
- JNE LBB0_8
+ JNE LBB0_12
 ADDQ $4, DI
 ADDQ $-4, SI

-LBB0_15:
+LBB0_19:
 CMPQ SI, $2
- JB LBB0_18
+ JB LBB0_22
 MOVL $1, AX
 CMPW 0(DI), $0
- JNE LBB0_8
+ JNE LBB0_12
 ADDQ $2, DI
 ADDQ $-2, SI

-LBB0_18:
+LBB0_22:
 XORL AX, AX
 TESTQ SI, SI
- JE LBB0_8
+ JE LBB0_12
 CMPB 0(DI), $0
 SETNE AX
 BYTE $0x5d // popq %rbp
 RET

-LBB0_8:
+LBB0_12:
 BYTE $0x5d // popq %rbp
 RET

-LBB0_9:
+LBB0_13:
 WORD $0xf8c5; BYTE $0x77 // vzeroupper

-LBB0_10:
+LBB0_14:
 MOVL $1, AX
 BYTE $0x5d // popq %rbp
 RET
@@ -103,107 +117,110 @@ LCPI1_3:
 _lspace:
 BYTE $0x55 // pushq %rbp
 WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp
- MOVQ DX, AX
- LEAQ 0(DI)(DX*1), R10
- MOVQ SI, DX
- SUBQ AX, DX
+ LEAQ 0(DI)(DX*1), AX
+ SUBQ DX, SI
 JE LBB1_7
- MOVL R10, CX
+ MOVL AX, CX
 ANDL $15, CX
 TESTQ CX, CX
 JE LBB1_7
- LEAQ 1(DI), R9
- LEAQ -1(SI), CX
+ LEAQ -1(SI), R9
+ XORL DX, DX
 MOVQ $4294977024, R8

LBB1_3:
- MOVBLSX 0(DI)(AX*1), DX
- CMPL DX, $32
- JA LBB1_22
- BTQ DX, R8
- JAE LBB1_22
- LEAQ 1(AX), DX
- CMPQ CX, AX
+ MOVBLSX 0(AX)(DX*1), CX
+ CMPL CX, $32
+ JA LBB1_5
+ BTQ CX, R8
+ JAE LBB1_5
+ LEAQ 1(DX), R10
+ CMPQ R9, DX
 JE LBB1_6
- ADDL R9, AX
- ANDL $15, AX
- TESTQ AX, AX
- MOVQ DX, AX
+ LEAQ 1(AX)(DX*1), CX
+ ANDL $15, CX
+ MOVQ R10, DX
+ TESTQ CX, CX
 JNE LBB1_3

LBB1_6:
- LEAQ 0(DI)(DX*1), R10
- SUBQ DX, SI
- MOVQ SI, DX
+ ADDQ R10, AX
+ SUBQ R10, SI

LBB1_7:
- CMPQ DX, $16
+ CMPQ SI, $16
 JB LBB1_13
- MOVQ DI, SI
- SUBQ R10, SI
- QUAD $0xffffff44056ffac5 // vmovdqu $-188(%rip), %xmm0 /* LCPI1_0(%rip) */
- QUAD $0xffffff4c0d6ffac5 // vmovdqu $-180(%rip), %xmm1 /* LCPI1_1(%rip) */
- QUAD $0xffffff54156ffac5 // vmovdqu $-172(%rip), %xmm2 /* LCPI1_2(%rip) */
- QUAD 
$0xffffff5c1d6ffac5 // vmovdqu $-164(%rip), %xmm3 /* LCPI1_3(%rip) */ + LEAQ -16(SI), CX + MOVQ CX, DX + ANDQ $-16, DX + LEAQ 16(DX)(AX*1), R8 + ANDL $15, CX + QUAD $0xffffff42056ffac5 // vmovdqu $-190(%rip), %xmm0 /* LCPI1_0(%rip) */ + QUAD $0xffffff4a0d6ffac5 // vmovdqu $-182(%rip), %xmm1 /* LCPI1_1(%rip) */ + QUAD $0xffffff52156ffac5 // vmovdqu $-174(%rip), %xmm2 /* LCPI1_2(%rip) */ + QUAD $0xffffff5a1d6ffac5 // vmovdqu $-166(%rip), %xmm3 /* LCPI1_3(%rip) */ LBB1_9: - LONG $0x6f79c1c4; BYTE $0x22 // vmovdqa (%r10), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 - LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 - LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 - LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 - LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax - CMPW AX, $-1 + LONG $0x206ff9c5 // vmovdqa (%rax), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 + LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 + LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 + LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 + LONG $0xd4d7f9c5 // vpmovmskb %xmm4, %edx + CMPW DX, $-1 JNE LBB1_10 - ADDQ $16, R10 - ADDQ $-16, DX + ADDQ $16, AX ADDQ $-16, SI - CMPQ DX, $15 + CMPQ SI, $15 JA LBB1_9 + MOVQ CX, SI + MOVQ R8, AX LBB1_13: - TESTQ DX, DX - JE LBB1_20 - LEAQ 0(R10)(DX*1), R8 - XORL AX, AX - MOVQ $4294977024, R9 + TESTQ SI, SI + JE LBB1_22 + LEAQ 0(AX)(SI*1), R8 + INCQ AX + MOVQ $4294977024, DX LBB1_15: - MOVBLSX 0(R10)(AX*1), SI - CMPL SI, $32 + MOVBLSX -1(AX), CX + CMPL CX, $32 JA LBB1_17 - BTQ SI, R9 + BTQ CX, DX JAE LBB1_17 - ADDQ $1, AX - CMPQ DX, AX + DECQ SI + INCQ AX + TESTQ SI, SI JNE LBB1_15 - MOVQ R8, R10 + MOVQ R8, AX + JMP LBB1_22 -LBB1_20: - SUBQ DI, R10 +LBB1_10: + MOVWLZX DX, CX + SUBQ DI, AX + NOTL CX + BSFL CX, CX + ADDQ CX, AX + BYTE $0x5d // popq %rbp + RET -LBB1_21: - MOVQ R10, AX +LBB1_5: + ADDQ DX, AX LBB1_22: - BYTE $0x5d // popq %rbp - RET - -LBB1_10: - MOVWLZX AX, AX - NOTL AX - BSFL AX, AX - SUBQ SI, AX - BYTE $0x5d // popq %rbp + SUBQ DI, AX + BYTE $0x5d // popq %rbp RET LBB1_17: - SUBQ DI, R10 - ADDQ AX, R10 - JMP LBB1_21 + NOTQ DI + ADDQ DI, AX + BYTE $0x5d // popq %rbp + RET LCPI2_0: QUAD $0x3030303030303030; QUAD $0x3030303030303030 // .space 16, '0000000000000000' @@ -222,24 +239,24 @@ _f64toa: LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax XORL R9, R9 TESTQ AX, AX - JS LBB2_113 + JS LBB2_108 TESTQ AX, AX - JE LBB2_114 + JE LBB2_109 LBB2_2: MOVQ AX, DX SHRQ $52, DX XORL BX, BX CMPL DX, $2047 - JE LBB2_112 + JE LBB2_107 MOVQ $4503599627370495, DI ANDQ DI, AX - ADDQ $1, DI + INCQ DI ORQ AX, DI LEAL -1023(DX), CX CMPL CX, $52 MOVL R9, -44(BP) - MOVQ R13, -56(BP) + MOVQ R13, -64(BP) JA LBB2_5 MOVL $1075, CX SUBQ DX, CX @@ -255,14 +272,14 @@ LBB2_5: LEAL -1077(DX), CX MOVL $-1076, R11 LONG $0xd9450f44 // cmovnel %ecx, %r11d - MOVQ DI, -64(BP) + MOVQ DI, -72(BP) LEAQ 0(DI*4), R8 TESTQ AX, AX SETNE AX CMPL DX, $2 SETCS R13 ORB AX, R13 - MOVBLZX R13, R14 + MOVBLZX R13, R9 TESTL R11, R11 JS LBB2_12 LONG $0x41e36945; WORD $0x0134; BYTE $0x00 // imull $78913, %r11d, %r12d @@ -274,16 +291,16 @@ LBB2_5: LONG $0x4fdc6941; WORD $0x1293; BYTE $0x00 // imull $1217359, %r12d, %ebx MOVQ R12, AX SHLQ $4, AX - LONG $0xf10d8d48; WORD $0x0080; BYTE $0x00 // leaq $33009(%rip), %rcx /* 
_DOUBLE_POW5_INV_SPLIT(%rip) */ + LONG $0x230d8d48; WORD $0x0080; BYTE $0x00 // leaq $32803(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ MOVQ R8, DI ORQ $2, DI MOVQ 0(AX)(CX*1), R10 - MOVQ 8(AX)(CX*1), R9 + MOVQ 8(AX)(CX*1), R14 MOVQ R10, AX MULQ DI MOVQ DX, CX - MOVQ R9, AX - MOVQ R9, -72(BP) + MOVQ R14, AX + MOVQ R14, -56(BP) MULQ DI MOVQ AX, R13 MOVQ DX, SI @@ -296,23 +313,23 @@ LBB2_5: ADDB $61, CX LONG $0xf5ad0f49 // shrdq %cl, %rsi, %r13 SHRQ CX, SI - NOTQ R14 - ADDQ R8, R14 + NOTQ R9 + ADDQ R8, R9 MOVQ R10, AX - MULQ R14 + MULQ R9 MOVQ DX, R15 - MOVQ R9, AX - MULQ R14 - MOVQ DX, R9 + MOVQ R14, AX + MULQ R9 + MOVQ DX, R14 MOVQ AX, BX ADDQ R15, BX - ADCQ $0, R9 - LONG $0xcbad0f4c // shrdq %cl, %r9, %rbx - SHRQ CX, R9 + ADCQ $0, R14 + LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + SHRQ CX, R14 MOVQ R10, AX MULQ R8 MOVQ DX, R11 - MOVQ -72(BP), AX + MOVQ -56(BP), AX MULQ R8 MOVQ DX, R10 ADDQ R11, AX @@ -321,7 +338,7 @@ LBB2_5: SHRQ CX, R10 TESTB $64, CX LONG $0xf5440f49 // cmoveq %r13, %rsi - LONG $0xcb440f4c // cmoveq %rbx, %r9 + LONG $0xf3440f4c // cmoveq %rbx, %r14 LONG $0xd0440f4c // cmoveq %rax, %r10 CMPL R12, $21 JA LBB2_23 @@ -331,14 +348,14 @@ LBB2_5: IMULQ AX, DX CMPQ DX, CX JBE LBB2_17 - TESTB $1, -64(BP) + TESTB $1, -72(BP) JNE LBB2_20 MOVL $-1, DX LBB2_10: - IMULQ AX, R14 - ADDL $1, DX - CMPQ R14, CX + IMULQ AX, R9 + INCL DX + CMPQ R9, CX JBE LBB2_10 CMPL DX, R12 SETCC R13 @@ -361,15 +378,15 @@ LBB2_12: SHRL $19, BX MOVLQSX AX, SI SHLQ $4, SI - LONG $0x1b0d8d4c; WORD $0x0095; BYTE $0x00 // leaq $38171(%rip), %r9 /* _DOUBLE_POW5_SPLIT(%rip) */ + LONG $0x4e158d4c; WORD $0x0094; BYTE $0x00 // leaq $37966(%rip), %r10 /* _DOUBLE_POW5_SPLIT(%rip) */ MOVQ R8, DI ORQ $2, DI - MOVQ 0(SI)(R9*1), R15 - MOVQ R15, AX - MOVQ R15, -72(BP) + MOVQ 0(SI)(R10*1), R14 + MOVQ R14, AX + MOVQ R14, -56(BP) MULQ DI MOVQ DX, CX - MOVQ 8(SI)(R9*1), R10 + MOVQ 8(SI)(R10*1), R10 MOVQ R10, AX MULQ DI MOVQ DX, DI @@ -381,20 +398,20 @@ LBB2_12: ADDB $60, CX LONG $0xfead0f48 // shrdq %cl, %rdi, %rsi SHRQ CX, DI - NOTQ R14 - ADDQ R8, R14 - MOVQ R15, AX - MULQ R14 + NOTQ R9 + ADDQ R8, R9 + MOVQ R14, AX + MULQ R9 MOVQ DX, R15 MOVQ R10, AX - MULQ R14 - MOVQ DX, R9 + MULQ R9 + MOVQ DX, R14 MOVQ AX, BX ADDQ R15, BX - ADCQ $0, R9 - LONG $0xcbad0f4c // shrdq %cl, %r9, %rbx - SHRQ CX, R9 - MOVQ -72(BP), AX + ADCQ $0, R14 + LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + SHRQ CX, R14 + MOVQ -56(BP), AX MULQ R8 MOVQ DX, R15 MOVQ R10, AX @@ -407,19 +424,20 @@ LBB2_12: TESTB $64, CX LONG $0xfe440f48 // cmoveq %rsi, %rdi MOVQ DI, SI - LONG $0xcb440f4c // cmoveq %rbx, %r9 + LONG $0xf3440f4c // cmoveq %rbx, %r14 LONG $0xd0440f4c // cmoveq %rax, %r10 CMPL R12, $1 JA LBB2_15 - MOVQ -64(BP), AX + MOVQ -72(BP), AX ANDL $1, AX TESTQ AX, AX SETEQ CX ANDB CX, R13 SUBQ AX, SI MOVB $1, R15 - MOVL R11, R12 - JMP LBB2_29 + MOVL R11, AX + MOVQ AX, -56(BP) + JMP LBB2_30 LBB2_14: MOVL CX, CX @@ -427,8 +445,8 @@ LBB2_14: XORL R12, R12 MOVL $1, R15 CMPQ DI, $10 - JAE LBB2_45 - JMP LBB2_60 + JAE LBB2_46 + JMP LBB2_61 LBB2_15: CMPL R12, $62 @@ -446,7 +464,7 @@ LBB2_17: LBB2_18: IMULQ AX, R8 - ADDL $1, DX + INCL DX CMPQ R8, CX JBE LBB2_18 CMPL DX, R12 @@ -461,11 +479,13 @@ LBB2_20: LBB2_21: IMULQ AX, DI - ADDL $1, DX + INCL DX CMPQ DI, CX JBE LBB2_21 + XORL AX, AX CMPL DX, R12 - ADCQ $-1, SI + SETCC AX + SUBQ AX, SI LBB2_23: XORL R15, R15 @@ -493,13 +513,13 @@ LBB2_26: MULQ DI MOVQ DX, CX SHRQ $2, CX - MOVQ R9, AX + MOVQ R14, AX SHRQ $2, AX MULQ DI MOVQ DX, SI SHRQ $2, SI CMPQ CX, SI - JBE LBB2_37 + JBE LBB2_39 MOVQ R10, AX SHRQ $2, AX MULQ DI @@ 
-510,144 +530,150 @@ LBB2_26: CMPL AX, $49 SETHI DI MOVL $2, R11 - MOVQ SI, R9 + MOVQ SI, R14 MOVQ CX, AX MOVQ DX, R10 - MOVQ -56(BP), R13 - JMP LBB2_38 + MOVQ -64(BP), R13 + JMP LBB2_40 LBB2_29: - MOVQ $-3689348814741910323, R14 + MOVQ R12, -56(BP) + +LBB2_30: + MOVQ $-3689348814741910323, BX MOVQ SI, AX - MULQ R14 - MOVQ DX, SI - MOVQ R9, AX - MULQ R14 - SHRQ $3, SI + MULQ BX + MOVQ DX, R12 + MOVQ R14, AX + MULQ BX + SHRQ $3, R12 SHRQ $3, DX - XORL DI, DI + XORL SI, SI XORL R11, R11 - CMPQ SI, DX - JBE LBB2_35 + CMPQ R12, DX + JBE LBB2_37 XORL CX, CX -LBB2_31: +LBB2_32: MOVQ DX, R8 LEAL 0(DX)(DX*1), DI MOVQ R10, AX - MULQ R14 - MOVQ DX, BX + MULQ BX + MOVQ DX, R9 LEAL 0(DI)(DI*4), AX - SHRQ $3, BX - LEAL 0(BX)(BX*1), DX - LEAL 0(DX)(DX*4), DI - NEGL DI - ADDB R10, DI - CMPL AX, R9 + SHRQ $3, R9 + LEAL 0(R9)(R9*1), DX + LEAL 0(DX)(DX*4), SI + NEGL SI + ADDB R10, SI + CMPL R14, AX SETEQ AX ANDB AX, R13 TESTB CX, CX SETEQ AX ANDB AX, R15 - ADDL $1, R11 - MOVQ SI, AX - MULQ R14 - MOVQ DX, SI - SHRQ $3, SI + INCL R11 + MOVQ R12, AX + MULQ BX + MOVQ DX, R12 + SHRQ $3, R12 MOVQ R8, AX - MULQ R14 + MULQ BX SHRQ $3, DX - MOVQ BX, R10 - MOVQ R8, R9 - MOVL DI, CX - CMPQ SI, DX - JA LBB2_31 + MOVQ R9, R10 + MOVQ R8, R14 + MOVL SI, CX + CMPQ R12, DX + JA LBB2_32 TESTB R13, R13 - JE LBB2_36 + JE LBB2_38 -LBB2_33: +LBB2_34: MOVQ R8, AX - MULQ R14 + MULQ BX MOVQ DX, CX SHRQ $3, CX LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX - CMPL AX, R8 - MOVQ -64(BP), R10 - JNE LBB2_42 + CMPL R8, AX + MOVQ -72(BP), DI + MOVQ -56(BP), R12 + JNE LBB2_44 + MOVQ BX, R14 -LBB2_34: - MOVQ BX, AX +LBB2_36: + MOVQ R9, AX MULQ R14 - MOVQ DX, R9 + MOVQ DX, R10 MOVQ CX, R8 - SHRQ $3, R9 - LEAL 0(R9)(R9*1), AX - LEAL 0(AX)(AX*4), SI - NEGL SI - ADDB BX, SI - TESTB DI, DI + SHRQ $3, R10 + LEAL 0(R10)(R10*1), AX + LEAL 0(AX)(AX*4), BX + NEGL BX + ADDB R9, BX + TESTB SI, SI SETEQ AX ANDB AX, R15 - ADDL $1, R11 + INCL R11 MOVQ CX, AX MULQ R14 MOVQ DX, CX SHRQ $3, CX LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX - MOVQ R9, BX - MOVL SI, DI - CMPL AX, R8 - JE LBB2_34 - JMP LBB2_43 - -LBB2_35: - MOVQ R9, R8 - MOVQ R10, BX + MOVQ R10, R9 + MOVL BX, SI + CMPL R8, AX + JE LBB2_36 + JMP LBB2_45 + +LBB2_37: + MOVQ R14, R8 + MOVQ R10, R9 TESTB R13, R13 - JNE LBB2_33 + JNE LBB2_34 -LBB2_36: - MOVL DI, SI - MOVQ BX, R9 - MOVQ -64(BP), R10 - JMP LBB2_43 +LBB2_38: + MOVL SI, BX + MOVQ R9, R10 + MOVQ -72(BP), DI + MOVQ -56(BP), R12 + JMP LBB2_45 -LBB2_37: +LBB2_39: XORL R11, R11 XORL DI, DI - MOVQ -56(BP), R13 + MOVQ -64(BP), R13 MOVQ BX, AX -LBB2_38: +LBB2_40: MOVQ $-3689348814741910323, BX MULQ BX MOVQ DX, CX SHRQ $3, CX - MOVQ R9, AX + MOVQ R14, AX MULQ BX SHRQ $3, DX CMPQ CX, DX - JBE LBB2_41 + JBE LBB2_43 -LBB2_39: +LBB2_41: MOVQ R10, SI - MOVQ DX, R9 + MOVQ DX, R14 MOVQ R10, AX MULQ BX MOVQ DX, R10 SHRQ $3, R10 - ADDL $1, R11 + INCL R11 MOVQ CX, AX MULQ BX MOVQ DX, CX SHRQ $3, CX - MOVQ R9, AX + MOVQ R14, AX MULQ BX SHRQ $3, DX CMPQ CX, DX - JA LBB2_39 + JA LBB2_41 LEAL 0(R10)(R10*1), AX LEAL 0(AX)(AX*4), AX SUBL AX, SI @@ -655,9 +681,9 @@ LBB2_39: CMPL SI, $4 SETHI DI -LBB2_41: +LBB2_43: XORL AX, AX - CMPQ R10, R9 + CMPQ R10, R14 SETEQ AX ORQ DI, AX ADDQ R10, AX @@ -665,129 +691,128 @@ LBB2_41: ADDL R11, R12 MOVL $1, R15 CMPQ DI, $10 - JB LBB2_60 - JMP LBB2_45 + JAE LBB2_46 + JMP LBB2_61 -LBB2_42: - MOVL DI, SI - MOVQ BX, R9 +LBB2_44: + MOVL SI, BX + MOVQ R9, R10 -LBB2_43: +LBB2_45: TESTB R13, R13 - SETEQ AX + SETEQ SI TESTB R15, R15 - SETNE CX - CMPB SI, $5 + SETNE AX + CMPB BX, $5 SETEQ DX - TESTB $1, R9 - SETEQ BX - ANDB CX, BX - ANDB DX, BX - 
CMPQ R9, R8 + TESTB $1, R10 SETEQ CX - ORB AX, R10 - ANDB CX, R10 - CMPB SI, $4 + ANDB AX, CX + ANDB DX, CX + CMPQ R10, R8 + SETEQ DX + ORB SI, DI + CMPB BX, $4 SETHI AX - XORB BX, AX - ORB R10, AX - MOVBLZX AX, DI - ADDQ R9, DI - MOVQ -56(BP), R13 + XORB CX, AX + ANDB DX, DI + ORB AX, DI + MOVBLZX DI, DI + ADDQ R10, DI + MOVQ -64(BP), R13 ADDL R11, R12 MOVL $1, R15 CMPQ DI, $10 - JB LBB2_60 + JB LBB2_61 -LBB2_45: +LBB2_46: MOVL $2, R15 CMPQ DI, $100 - JB LBB2_60 + JB LBB2_61 MOVL $3, R15 CMPQ DI, $1000 - JB LBB2_60 + JB LBB2_61 MOVL $4, R15 CMPQ DI, $10000 - JB LBB2_60 + JB LBB2_61 MOVL $5, R15 CMPQ DI, $100000 - JB LBB2_60 + JB LBB2_61 MOVL $6, R15 CMPQ DI, $1000000 - JB LBB2_60 + JB LBB2_61 MOVL $7, R15 CMPQ DI, $10000000 - JB LBB2_60 + JB LBB2_61 MOVL $8, R15 CMPQ DI, $100000000 - JB LBB2_60 + JB LBB2_61 MOVL $9, R15 CMPQ DI, $1000000000 - JB LBB2_60 + JB LBB2_61 MOVQ $8589934464, AX ADDQ $1410065536, AX MOVL $10, R15 CMPQ DI, AX - JB LBB2_60 + JB LBB2_61 MOVQ DI, AX SHRQ $11, AX MOVL $11, R15 CMPQ AX, $48828125 - JB LBB2_60 + JB LBB2_61 MOVQ DI, AX SHRQ $12, AX MOVL $12, R15 CMPQ AX, $244140625 - JB LBB2_60 + JB LBB2_61 MOVQ DI, AX SHRQ $13, AX MOVL $13, R15 CMPQ AX, $1220703125 - JB LBB2_60 + JB LBB2_61 MOVL $14, R15 MOVQ $100000000000000, AX CMPQ DI, AX - JB LBB2_60 + JB LBB2_61 MOVL $15, R15 MOVQ $1000000000000000, AX CMPQ DI, AX - JB LBB2_60 + JB LBB2_61 MOVQ $10000000000000000, AX CMPQ DI, AX MOVL $17, R15 SBBL $0, R15 -LBB2_60: +LBB2_61: LEAL 0(R15)(R12*1), R14 - LEAL 0(R15)(R12*1), AX - ADDL $5, AX + LEAL 5(R15)(R12*1), AX CMPL AX, $27 - JB LBB2_66 + JB LBB2_67 LEAQ 1(R13), BX MOVQ BX, SI MOVL R15, DX - LONG $0x004a4ce8; BYTE $0x00 // callq _print_mantissa + LONG $0x0049b5e8; BYTE $0x00 // callq _print_mantissa MOVB 1(R13), AX MOVB AX, 0(R13) MOVL $1, AX CMPL R15, $2 - JB LBB2_63 + JB LBB2_64 MOVB $46, 0(BX) - ADDL $1, R15 + INCL R15 MOVL R15, AX -LBB2_63: +LBB2_64: MOVL AX, BX MOVB $101, 0(R13)(BX*1) - ADDQ $1, BX + INCQ BX TESTL R14, R14 - JLE LBB2_68 - ADDL $-1, R14 + JLE LBB2_69 + DECL R14 MOVL -44(BP), R9 CMPL R14, $100 - JL LBB2_69 + JL LBB2_70 -LBB2_65: +LBB2_66: MOVL R14, AX MOVL $3435973837, CX IMULQ AX, CX @@ -795,27 +820,28 @@ LBB2_65: LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX SUBL AX, R14 - LONG $0xfa058d48; WORD $0x00a3; BYTE $0x00 // leaq $41978(%rip), %rax /* _Digits(%rip) */ + LONG $0x20058d48; WORD $0x00a3; BYTE $0x00 // leaq $41760(%rip), %rax /* _Digits(%rip) */ MOVWLZX 0(AX)(CX*2), AX MOVL BX, CX MOVW AX, 0(R13)(CX*1) ORB $48, R14 - MOVB R14, 2(R13)(CX*1) + MOVLQSX BX, AX + MOVB R14, 2(AX)(R13*1) ADDL $3, BX - JMP LBB2_112 + JMP LBB2_107 -LBB2_66: +LBB2_67: TESTL R14, R14 - JLE LBB2_71 + JLE LBB2_72 MOVL R12, R13 SARL $31, R13 ANDL R14, R13 XORL BX, BX TESTL R12, R12 LONG $0xe3480f44 // cmovsl %ebx, %r12d - JMP LBB2_73 + JMP LBB2_74 -LBB2_68: +LBB2_69: ADDL $2, AX MOVB $45, 0(R13)(BX*1) MOVL $1, CX @@ -824,192 +850,182 @@ LBB2_68: MOVL AX, BX MOVL -44(BP), R9 CMPL R14, $100 - JGE LBB2_65 + JGE LBB2_66 -LBB2_69: +LBB2_70: CMPL R14, $10 - JL LBB2_84 + JL LBB2_85 MOVLQSX R14, AX - LONG $0x8f0d8d48; WORD $0x00a3; BYTE $0x00 // leaq $41871(%rip), %rcx /* _Digits(%rip) */ + LONG $0xb20d8d48; WORD $0x00a2; BYTE $0x00 // leaq $41650(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVL BX, CX MOVW AX, 0(R13)(CX*1) ADDL $2, BX - JMP LBB2_112 + JMP LBB2_107 -LBB2_71: +LBB2_72: MOVW $11824, 0(R13) TESTL R14, R14 - JS LBB2_85 + JS LBB2_86 XORL R13, R13 MOVL $2, BX XORL R12, R12 -LBB2_73: +LBB2_74: MOVL BX, SI - ADDQ -56(BP), SI + ADDQ -64(BP), SI MOVL R15, DX - 
LONG $0x004948e8; BYTE $0x00 // callq _print_mantissa + LONG $0x0048b1e8; BYTE $0x00 // callq _print_mantissa TESTL R13, R13 - JE LBB2_77 - LEAL 0(BX)(R13*1), AX + JE LBB2_78 + LEAL 0(R13)(BX*1), AX CMPL R13, R15 - JGE LBB2_79 + JGE LBB2_80 LEAL 0(BX)(R15*1), CX MOVL AX, AX - MOVQ -56(BP), R10 + MOVQ -64(BP), R10 -LBB2_76: +LBB2_77: MOVBLZX -1(R10)(CX*1), DX MOVB DX, 0(R10)(CX*1) - LEAQ -1(CX), DX - MOVQ DX, CX - CMPQ DX, AX - JG LBB2_76 - JMP LBB2_80 + DECQ CX + CMPQ CX, AX + JG LBB2_77 + JMP LBB2_81 -LBB2_77: - MOVQ -56(BP), R10 +LBB2_78: + MOVQ -64(BP), R10 ADDL R15, BX TESTL R12, R12 - JNE LBB2_81 - JMP LBB2_111 + JNE LBB2_82 + JMP LBB2_106 -LBB2_79: +LBB2_80: MOVL AX, AX - MOVQ -56(BP), R10 + MOVQ -64(BP), R10 -LBB2_80: +LBB2_81: MOVB $46, 0(R10)(AX*1) ORL $1, BX ADDL R15, BX TESTL R12, R12 - JE LBB2_111 + JE LBB2_106 -LBB2_81: +LBB2_82: MOVLQSX BX, BX LEAL -1(R12), R8 XORL AX, AX CMPL R8, $127 MOVL -44(BP), R9 - JB LBB2_94 - ADDQ $1, R8 + JB LBB2_95 + INCQ R8 MOVQ R8, AX ANDQ $-128, AX - LEAQ -128(AX), CX - MOVQ CX, SI + LEAQ -128(AX), DX + MOVQ DX, SI SHRQ $7, SI - ADDQ $1, SI - MOVL SI, DX - ANDL $3, DX - CMPQ CX, $384 - JAE LBB2_88 - XORL DI, DI - JMP LBB2_90 - -LBB2_84: + INCQ SI + MOVL SI, CX + ANDL $3, CX + CMPQ DX, $384 + JAE LBB2_89 + XORL SI, SI + JMP LBB2_91 + +LBB2_85: ADDB $48, R14 MOVL BX, AX - ADDL $1, BX + INCL BX MOVB R14, 0(R13)(AX*1) - JMP LBB2_112 + JMP LBB2_107 -LBB2_85: +LBB2_86: MOVL $2, BX SUBL R14, BX LEAQ -2(BX), R8 - MOVL $2, DX - CMPQ R8, $4 - JB LBB2_109 + MOVL $2, AX CMPQ R8, $128 - JAE LBB2_96 - XORL CX, CX - JMP LBB2_105 - -LBB2_88: - LEAQ 0(BX)(R10*1), CX - ADDQ $480, CX - ANDQ $-4, SI - NEGQ SI - XORL DI, DI - QUAD $0xfffff68e056ffec5 // vmovdqu $-2418(%rip), %ymm0 /* LCPI2_0(%rip) */ + JB LBB2_104 + MOVQ R8, AX + ANDQ $-128, AX + LEAQ -128(AX), DX + MOVQ DX, SI + SHRQ $7, SI + INCQ SI + MOVL SI, CX + ANDL $3, CX + CMPQ DX, $384 + JAE LBB2_97 + XORL SI, SI + JMP LBB2_99 LBB2_89: - QUAD $0xfffe2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rcx,%rdi) - QUAD $0xfffe4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rcx,%rdi) - QUAD $0xfffe6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rcx,%rdi) - QUAD $0xfffe8039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rcx,%rdi) - QUAD $0xfffea039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rcx,%rdi) - QUAD $0xfffec039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rcx,%rdi) - QUAD $0xfffee039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rcx,%rdi) - QUAD $0xffff0039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rcx,%rdi) - QUAD $0xffff2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rcx,%rdi) - QUAD $0xffff4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rcx,%rdi) - QUAD $0xffff6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rcx,%rdi) - LONG $0x447ffec5; WORD $0x8039 // vmovdqu %ymm0, $-128(%rcx,%rdi) - LONG $0x447ffec5; WORD $0xa039 // vmovdqu %ymm0, $-96(%rcx,%rdi) - LONG $0x447ffec5; WORD $0xc039 // vmovdqu %ymm0, $-64(%rcx,%rdi) - LONG $0x447ffec5; WORD $0xe039 // vmovdqu %ymm0, $-32(%rcx,%rdi) - LONG $0x047ffec5; BYTE $0x39 // vmovdqu %ymm0, (%rcx,%rdi) - ADDQ $512, DI - ADDQ $4, SI - JNE LBB2_89 + LEAQ 480(BX)(R10*1), DI + MOVQ CX, DX + SUBQ SI, DX + XORL SI, SI + QUAD $0xfffff66d056ffec5 // vmovdqu $-2451(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_90: - TESTQ DX, DX - JE LBB2_93 - ADDQ BX, DI - LEAQ 0(R10)(DI*1), CX - ADDQ $96, CX - SHLQ $7, DX - XORL SI, SI - QUAD $0xfffff5df056ffec5 // vmovdqu $-2593(%rip), %ymm0 /* LCPI2_0(%rip) */ + QUAD $0xfffe2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rdi,%rsi) 
+ QUAD $0xfffe4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rdi,%rsi) + QUAD $0xfffe6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rdi,%rsi) + QUAD $0xfffe8037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rdi,%rsi) + QUAD $0xfffea037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rdi,%rsi) + QUAD $0xfffec037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rdi,%rsi) + QUAD $0xfffee037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rdi,%rsi) + QUAD $0xffff0037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rdi,%rsi) + QUAD $0xffff2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rdi,%rsi) + QUAD $0xffff4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rdi,%rsi) + QUAD $0xffff6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rdi,%rsi) + LONG $0x447ffec5; WORD $0x8037 // vmovdqu %ymm0, $-128(%rdi,%rsi) + LONG $0x447ffec5; WORD $0xa037 // vmovdqu %ymm0, $-96(%rdi,%rsi) + LONG $0x447ffec5; WORD $0xc037 // vmovdqu %ymm0, $-64(%rdi,%rsi) + LONG $0x447ffec5; WORD $0xe037 // vmovdqu %ymm0, $-32(%rdi,%rsi) + LONG $0x047ffec5; BYTE $0x37 // vmovdqu %ymm0, (%rdi,%rsi) + ADDQ $512, SI + ADDQ $4, DX + JNE LBB2_90 -LBB2_92: - LONG $0x447ffec5; WORD $0xa031 // vmovdqu %ymm0, $-96(%rcx,%rsi) - LONG $0x447ffec5; WORD $0xc031 // vmovdqu %ymm0, $-64(%rcx,%rsi) - LONG $0x447ffec5; WORD $0xe031 // vmovdqu %ymm0, $-32(%rcx,%rsi) - LONG $0x047ffec5; BYTE $0x31 // vmovdqu %ymm0, (%rcx,%rsi) - SUBQ $-128, SI - CMPQ DX, SI - JNE LBB2_92 +LBB2_91: + TESTQ CX, CX + JE LBB2_94 + ADDQ BX, SI + LEAQ 96(R10)(SI*1), DX + NEGQ CX + QUAD $0xfffff5c4056ffec5 // vmovdqu $-2620(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_93: + LONG $0x427ffec5; BYTE $0xa0 // vmovdqu %ymm0, $-96(%rdx) + LONG $0x427ffec5; BYTE $0xc0 // vmovdqu %ymm0, $-64(%rdx) + LONG $0x427ffec5; BYTE $0xe0 // vmovdqu %ymm0, $-32(%rdx) + LONG $0x027ffec5 // vmovdqu %ymm0, (%rdx) + SUBQ $-128, DX + INCQ CX + JNE LBB2_93 + +LBB2_94: ADDQ AX, BX CMPQ R8, AX - JE LBB2_112 + JE LBB2_107 -LBB2_94: +LBB2_95: SUBL AX, R12 -LBB2_95: +LBB2_96: MOVB $48, 0(R10)(BX*1) - ADDQ $1, BX - ADDL $-1, R12 - JNE LBB2_95 - JMP LBB2_112 + INCQ BX + DECL R12 + JNE LBB2_96 + JMP LBB2_107 -LBB2_96: - MOVQ R8, CX - ANDQ $-128, CX - LEAQ -128(CX), SI - MOVQ SI, DX - SHRQ $7, DX - ADDQ $1, DX - MOVL DX, AX - ANDL $3, AX - CMPQ SI, $384 - JAE LBB2_98 +LBB2_97: + MOVQ CX, DX + SUBQ SI, DX XORL SI, SI - JMP LBB2_100 + QUAD $0xfffff577056ffec5 // vmovdqu $-2697(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_98: - ANDQ $-4, DX - NEGQ DX - XORL SI, SI - QUAD $0xfffff560056ffec5 // vmovdqu $-2720(%rip), %ymm0 /* LCPI2_0(%rip) */ - -LBB2_99: LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x02 // vmovdqu %ymm0, $2(%r13,%rsi) LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x22 // vmovdqu %ymm0, $34(%r13,%rsi) LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x42 // vmovdqu %ymm0, $66(%r13,%rsi) @@ -1028,15 +1044,15 @@ LBB2_99: QUAD $0x01e235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $482(%r13,%rsi) ADDQ $512, SI ADDQ $4, DX - JNE LBB2_99 + JNE LBB2_98 -LBB2_100: - TESTQ AX, AX - JE LBB2_103 - NEGQ AX - QUAD $0xfffff4ab056ffec5 // vmovdqu $-2901(%rip), %ymm0 /* LCPI2_0(%rip) */ +LBB2_99: + TESTQ CX, CX + JE LBB2_102 + NEGQ CX + QUAD $0xfffff4c2056ffec5 // vmovdqu $-2878(%rip), %ymm0 /* LCPI2_0(%rip) */ -LBB2_102: +LBB2_101: MOVQ SI, DX ORQ $2, DX LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x00 // vmovdqu %ymm0, (%r13,%rdx) @@ -1044,70 +1060,51 @@ LBB2_102: LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x40 // vmovdqu %ymm0, $64(%r13,%rdx) LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x60 // vmovdqu %ymm0, $96(%r13,%rdx) SUBQ $-128, SI - INCQ AX - JNE LBB2_102 - 
-LBB2_103: - CMPQ R8, CX - JE LBB2_110 - TESTB $124, R8 - JE LBB2_108 - -LBB2_105: - MOVQ R8, SI - ANDQ $-4, SI - LEAQ 2(SI), DX + INCQ CX + JNE LBB2_101 -LBB2_106: - MOVL $808464432, 2(R13)(CX*1) - ADDQ $4, CX - CMPQ SI, CX - JNE LBB2_106 - CMPQ R8, SI - JNE LBB2_109 - JMP LBB2_110 - -LBB2_108: - ORQ $2, CX - MOVQ CX, DX +LBB2_102: + CMPQ R8, AX + JE LBB2_105 + ORQ $2, AX -LBB2_109: - MOVB $48, 0(R13)(DX*1) - ADDQ $1, DX - CMPQ BX, DX - JNE LBB2_109 +LBB2_104: + MOVB $48, 0(R13)(AX*1) + INCQ AX + CMPQ BX, AX + JNE LBB2_104 -LBB2_110: +LBB2_105: ADDQ BX, R13 MOVQ R13, SI MOVL R15, DX WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x0045e3e8; BYTE $0x00 // callq _print_mantissa + LONG $0x00459fe8; BYTE $0x00 // callq _print_mantissa ADDL BX, R15 MOVL R15, BX -LBB2_111: +LBB2_106: MOVL -44(BP), R9 -LBB2_112: +LBB2_107: ADDL R9, BX - JMP LBB2_115 + JMP LBB2_110 -LBB2_113: +LBB2_108: MOVQ $9223372036854775807, CX ANDQ CX, AX MOVB $45, 0(R13) - ADDQ $1, R13 + INCQ R13 MOVL $1, R9 TESTQ AX, AX JNE LBB2_2 -LBB2_114: +LBB2_109: MOVB $48, 0(R13) - ADDL $1, R9 + INCL R9 MOVL R9, BX -LBB2_115: +LBB2_110: MOVL BX, AX ADDQ $40, SP BYTE $0x5b // popq %rbx @@ -1128,10 +1125,10 @@ LBB3_1: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp MOVB $45, 0(DI) - ADDQ $1, DI + INCQ DI NEGQ SI - LONG $0x000045e8; BYTE $0x00 // callq _u64toa - ADDL $1, AX + LONG $0x000044e8; BYTE $0x00 // callq _u64toa + INCL AX BYTE $0x5d // popq %rbp RET @@ -1175,7 +1172,7 @@ _u64toa: ADDQ AX, AX CMPL SI, $1000 JB LBB4_3 - LONG $0xfa0d8d48; WORD $0x009e; BYTE $0x00 // leaq $40698(%rip), %rcx /* _Digits(%rip) */ + LONG $0x740d8d48; WORD $0x009e; BYTE $0x00 // leaq $40564(%rip), %rcx /* _Digits(%rip) */ MOVB 0(DX)(CX*1), CX MOVB CX, 0(DI) MOVL $1, CX @@ -1189,26 +1186,26 @@ LBB4_3: LBB4_4: MOVWLZX DX, DX ORQ $1, DX - LONG $0xd9358d48; WORD $0x009e; BYTE $0x00 // leaq $40665(%rip), %rsi /* _Digits(%rip) */ + LONG $0x53358d48; WORD $0x009e; BYTE $0x00 // leaq $40531(%rip), %rsi /* _Digits(%rip) */ MOVB 0(DX)(SI*1), DX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB DX, 0(DI)(SI*1) LBB4_6: - LONG $0xc7158d48; WORD $0x009e; BYTE $0x00 // leaq $40647(%rip), %rdx /* _Digits(%rip) */ + LONG $0x42158d48; WORD $0x009e; BYTE $0x00 // leaq $40514(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), DX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB DX, 0(DI)(SI*1) LBB4_7: MOVWLZX AX, AX ORQ $1, AX - LONG $0xae158d48; WORD $0x009e; BYTE $0x00 // leaq $40622(%rip), %rdx /* _Digits(%rip) */ + LONG $0x2a158d48; WORD $0x009e; BYTE $0x00 // leaq $40490(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), AX MOVL CX, DX - ADDL $1, CX + INCL CX MOVB AX, 0(DI)(DX*1) MOVL CX, AX BYTE $0x5d // popq %rbp @@ -1252,7 +1249,7 @@ LBB4_8: ADDQ R11, R11 CMPL SI, $10000000 JB LBB4_11 - LONG $0x16058d48; WORD $0x009e; BYTE $0x00 // leaq $40470(%rip), %rax /* _Digits(%rip) */ + LONG $0x93058d48; WORD $0x009d; BYTE $0x00 // leaq $40339(%rip), %rax /* _Digits(%rip) */ MOVB 0(R10)(AX*1), AX MOVB AX, 0(DI) MOVL $1, CX @@ -1266,39 +1263,39 @@ LBB4_11: LBB4_12: MOVL R10, AX ORQ $1, AX - LONG $0xf1358d48; WORD $0x009d; BYTE $0x00 // leaq $40433(%rip), %rsi /* _Digits(%rip) */ + LONG $0x6e358d48; WORD $0x009d; BYTE $0x00 // leaq $40302(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB AX, 0(DI)(SI*1) LBB4_14: - LONG $0xdf058d48; WORD $0x009d; BYTE $0x00 // leaq $40415(%rip), %rax /* _Digits(%rip) */ + LONG $0x5d058d48; WORD $0x009d; BYTE $0x00 // leaq $40285(%rip), %rax /* _Digits(%rip) */ MOVB 0(R9)(AX*1), AX MOVL CX, SI - ADDL 
$1, CX + INCL CX MOVB AX, 0(DI)(SI*1) LBB4_15: MOVWLZX R9, AX ORQ $1, AX - LONG $0xc4358d48; WORD $0x009d; BYTE $0x00 // leaq $40388(%rip), %rsi /* _Digits(%rip) */ + LONG $0x43358d48; WORD $0x009d; BYTE $0x00 // leaq $40259(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, DX - MOVB AX, 0(DI)(DX*1) + MOVB AX, 0(DX)(DI*1) MOVB 0(R8)(SI*1), AX - MOVB AX, 1(DI)(DX*1) + MOVB AX, 1(DX)(DI*1) MOVWLZX R8, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX - MOVB AX, 2(DI)(DX*1) + MOVB AX, 2(DX)(DI*1) MOVB 0(R11)(SI*1), AX - MOVB AX, 3(DI)(DX*1) + MOVB AX, 3(DX)(DI*1) MOVWLZX R11, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX ADDL $5, CX - MOVB AX, 4(DI)(DX*1) + MOVB AX, 4(DX)(DI*1) MOVL CX, AX BYTE $0x5d // popq %rbp RET @@ -1320,7 +1317,7 @@ LBB4_16: LONG $0xe100c269; WORD $0x05f5 // imull $100000000, %edx, %eax SUBL AX, SI LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffdc60d6ffac5 // vmovdqu $-570(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffdcb0d6ffac5 // vmovdqu $-565(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1331,11 +1328,11 @@ LBB4_16: LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 - QUAD $0xfffffdc01512fbc5 // vmovddup $-576(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffffdc51512fbc5 // vmovddup $-571(%rip), %xmm2 /* LCPI4_1(%rip) */ LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 - QUAD $0xfffffdbc2512fbc5 // vmovddup $-580(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffffdc12512fbc5 // vmovddup $-575(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffd882d6ffac5 // vmovdqu $-632(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffd8d2d6ffac5 // vmovdqu $-627(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1354,17 +1351,17 @@ LBB4_16: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffd420dfcf9c5 // vpaddb $-702(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ + QUAD $0xfffffd470dfcf9c5 // vpaddb $-697(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ LONG $0xd2efe9c5 // vpxor %xmm2, %xmm2, %xmm2 LONG $0xc274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - NOTL AX ORL $32768, AX + XORL $-32769, AX BSFL AX, AX MOVL $16, CX SUBL AX, CX SHLQ $4, AX - LONG $0x3c158d48; WORD $0x009d; BYTE $0x00 // leaq $40252(%rip), %rdx /* _VecShiftShuffles(%rip) */ + LONG $0xb8158d48; WORD $0x009c; BYTE $0x00 // leaq $40120(%rip), %rdx /* _VecShiftShuffles(%rip) */ LONG $0x0071e2c4; WORD $0x1004 // vpshufb (%rax,%rdx), %xmm1, %xmm0 LONG $0x077ffac5 // vmovdqu %xmm0, (%rdi) MOVL CX, AX @@ -1390,7 +1387,7 @@ LBB4_20: CMPL DX, $99 JA LBB4_22 MOVL DX, AX - LONG $0x1f0d8d48; WORD $0x009c; BYTE $0x00 // leaq $39967(%rip), %rcx /* _Digits(%rip) */ + LONG $0x9b0d8d48; WORD $0x009b; BYTE $0x00 // leaq $39835(%rip), %rcx /* _Digits(%rip) */ MOVB 0(CX)(AX*2), DX MOVB 1(CX)(AX*2), AX MOVB DX, 0(DI) @@ -1415,7 +1412,7 @@ LBB4_22: WORD $0xc96b; BYTE $0x64 // imull $100, %ecx, %ecx SUBL CX, AX MOVWLZX AX, AX - LONG $0xce0d8d48; WORD $0x009b; BYTE $0x00 // leaq $39886(%rip), %rcx /* _Digits(%rip) */ + LONG $0x4a0d8d48; WORD $0x009b; BYTE $0x00 // leaq $39754(%rip), %rcx /* 
_Digits(%rip) */ MOVB 0(CX)(AX*2), DX MOVB 1(CX)(AX*2), AX MOVB DX, 1(DI) @@ -1427,7 +1424,7 @@ LBB4_24: WORD $0xc86b; BYTE $0x64 // imull $100, %eax, %ecx SUBL CX, DX MOVWLZX AX, AX - LONG $0xab058d4c; WORD $0x009b; BYTE $0x00 // leaq $39851(%rip), %r8 /* _Digits(%rip) */ + LONG $0x27058d4c; WORD $0x009b; BYTE $0x00 // leaq $39719(%rip), %r8 /* _Digits(%rip) */ MOVB 0(R8)(AX*2), CX MOVB 1(R8)(AX*2), AX MOVB CX, 0(DI) @@ -1448,7 +1445,7 @@ LBB4_25: MULQ DX SHRQ $26, DX LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffbe40d6ffac5 // vmovdqu $-1052(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffbe60d6ffac5 // vmovdqu $-1050(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1459,11 +1456,11 @@ LBB4_25: LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 - QUAD $0xfffffbde1512fbc5 // vmovddup $-1058(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffffbe01512fbc5 // vmovddup $-1056(%rip), %xmm2 /* LCPI4_1(%rip) */ LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 - QUAD $0xfffffbda2512fbc5 // vmovddup $-1062(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffffbdc2512fbc5 // vmovddup $-1060(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffba62d6ffac5 // vmovdqu $-1114(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffba82d6ffac5 // vmovdqu $-1112(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1484,7 +1481,7 @@ LBB4_25: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffb5805fcf9c5 // vpaddb $-1192(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ + QUAD $0xfffffb5a05fcf9c5 // vpaddb $-1190(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ MOVL CX, AX LONG $0x047ffac5; BYTE $0x07 // vmovdqu %xmm0, (%rdi,%rax) ORL $16, CX @@ -1509,344 +1506,322 @@ _quote: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $32, SP + BYTE $0x50 // pushq %rax MOVQ CX, R11 - MOVQ DX, R15 - MOVQ 0(CX), R10 - TESTB $1, R8 + MOVQ DX, R12 MOVQ SI, AX - LONG $0xa40d8d48; WORD $0x009b; BYTE $0x00 // leaq $39844(%rip), %rcx /* __SingleQuoteTab(%rip) */ - LONG $0x9d258d4c; WORD $0x00ab; BYTE $0x00 // leaq $43933(%rip), %r12 /* __DoubleQuoteTab(%rip) */ - LONG $0xe1440f4c // cmoveq %rcx, %r12 + MOVQ 0(CX), R14 + TESTB $1, R8 + LONG $0x230d8d48; WORD $0x009b; BYTE $0x00 // leaq $39715(%rip), %rcx /* __SingleQuoteTab(%rip) */ + LONG $0x1c058d4c; WORD $0x00ab; BYTE $0x00 // leaq $43804(%rip), %r8 /* __DoubleQuoteTab(%rip) */ + LONG $0xc1440f4c // cmoveq %rcx, %r8 LEAQ 0(SI*8), CX - CMPQ R10, CX - JGE LBB5_56 - MOVQ R15, R14 - MOVQ DI, R9 + CMPQ R14, CX + JGE LBB5_51 + MOVQ R12, R15 + MOVQ DI, R10 TESTQ AX, AX - JE LBB5_80 - MOVQ R11, -56(BP) - QUAD $0xffffff71056ffac5 // vmovdqu $-143(%rip), %xmm0 /* LCPI5_0(%rip) */ - QUAD $0xffffff790d6ffac5 // vmovdqu $-135(%rip), %xmm1 /* LCPI5_1(%rip) */ - QUAD $0xffffff81156ffac5 // vmovdqu $-127(%rip), %xmm2 /* LCPI5_2(%rip) */ + JE LBB5_74 + QUAD $0xffffff78056ffac5 // vmovdqu $-136(%rip), %xmm0 /* LCPI5_0(%rip) */ + QUAD $0xffffff800d6ffac5 // vmovdqu $-128(%rip), %xmm1 /* LCPI5_1(%rip) */ + QUAD $0xffffff88156ffac5 // vmovdqu $-120(%rip), %xmm2 /* 
LCPI5_2(%rip) */ LONG $0xdb76e1c5 // vpcmpeqd %xmm3, %xmm3, %xmm3 - MOVQ DI, CX - MOVQ R15, -48(BP) - MOVQ R15, R14 - MOVQ R12, -64(BP) + MOVQ DI, R10 + MOVQ R12, R15 + MOVQ R12, -48(BP) LBB5_3: - MOVQ CX, R9 + MOVQ R11, DX CMPQ AX, $15 - SETGT BX + SETGT R11 + MOVQ R14, R9 + MOVQ R15, R13 + MOVQ AX, SI MOVQ R10, R12 - MOVQ R14, R15 - MOVQ AX, R11 - MOVQ CX, R13 - CMPQ R10, $16 - JL LBB5_10 + CMPQ R14, $16 + JL LBB5_9 CMPQ AX, $16 - JL LBB5_10 - XORL R15, R15 - MOVQ AX, CX - MOVQ R10, DX + JL LBB5_9 + MOVQ R10, R12 + MOVQ AX, SI + MOVQ R15, R13 + MOVQ R14, BX LBB5_6: - LONG $0x6f7a81c4; WORD $0x3924 // vmovdqu (%r9,%r15), %xmm4 + LONG $0x6f7ac1c4; WORD $0x2424 // vmovdqu (%r12), %xmm4 LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xfa74d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm7 LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 - LONG $0x7f7a81c4; WORD $0x3e24 // vmovdqu %xmm4, (%r14,%r15) + LONG $0x7f7ac1c4; WORD $0x0065 // vmovdqu %xmm4, (%r13) LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 - LONG $0xe4dbd1c5 // vpand %xmm4, %xmm5, %xmm4 + LONG $0xe5dbd9c5 // vpand %xmm5, %xmm4, %xmm4 LONG $0xe4ebc9c5 // vpor %xmm4, %xmm6, %xmm4 - LONG $0xf4d7f9c5 // vpmovmskb %xmm4, %esi - TESTW SI, SI - JNE LBB5_23 - LEAQ -16(CX), R11 - LEAQ -16(DX), R12 - CMPQ CX, $31 - SETGT BX - ADDQ $16, R15 - CMPQ CX, $32 + LONG $0xccd7f9c5 // vpmovmskb %xmm4, %ecx + TESTW CX, CX + JNE LBB5_19 + ADDQ $16, R12 + ADDQ $16, R13 + LEAQ -16(BX), R9 + CMPQ SI, $31 + SETGT R11 + CMPQ SI, $32 + LEAQ -16(SI), SI JL LBB5_9 - MOVQ R11, CX - CMPQ DX, $31 - MOVQ R12, DX + CMPQ BX, $31 + MOVQ R9, BX JG LBB5_6 LBB5_9: - LEAQ 0(R9)(R15*1), R13 - ADDQ R14, R15 - -LBB5_10: - TESTB BX, BX - JE LBB5_14 - LONG $0x6f7ac1c4; WORD $0x0065 // vmovdqu (%r13), %xmm4 + TESTB R11, R11 + JE LBB5_13 + LONG $0x6f7ac1c4; WORD $0x2424 // vmovdqu (%r12), %xmm4 LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xfa74d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm7 LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 LONG $0xfb64d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm7 - LONG $0xefdbd1c5 // vpand %xmm7, %xmm5, %xmm5 + LONG $0xeddbc1c5 // vpand %xmm5, %xmm7, %xmm5 LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 LONG $0xcdd7f9c5 // vpmovmskb %xmm5, %ecx ORL $65536, CX BSFL CX, R11 LONG $0x7ef9e1c4; BYTE $0xe1 // vmovq %xmm4, %rcx - CMPQ R12, R11 - JGE LBB5_24 - CMPQ R12, $8 - JB LBB5_28 - MOVQ CX, 0(R15) - LEAQ 8(R13), R11 - ADDQ $8, R15 - LEAQ -8(R12), BX - CMPQ BX, $4 - JAE LBB5_29 - JMP LBB5_30 - -LBB5_14: - TESTQ R12, R12 - JLE LBB5_21 - TESTQ R11, R11 - JLE LBB5_21 - MOVQ R9, -72(BP) - XORL R9, R9 - XORL CX, CX - -LBB5_17: - MOVBLZX 0(R13)(R9*1), SI - MOVQ SI, BX - SHLQ $4, BX - LONG $0x2d158d48; WORD $0x009a; BYTE $0x00 // leaq $39469(%rip), %rdx /* __SingleQuoteTab(%rip) */ - CMPQ 0(BX)(DX*1), $0 - JNE LBB5_27 - LEAQ 0(R11)(CX*1), R8 - MOVB SI, 0(R15)(R9*1) - LEAQ -1(CX), BX - CMPQ R8, $2 - JL LBB5_20 - ADDQ R12, CX - ADDQ $1, R9 - CMPQ CX, $1 + CMPQ R9, R11 + JGE LBB5_20 + CMPQ R9, $8 + JB LBB5_23 + MOVQ CX, 0(R13) + LEAQ 8(R12), R11 + ADDQ $8, R13 + LEAQ -8(R9), SI + CMPQ SI, $4 + JAE LBB5_24 + JMP LBB5_25 + +LBB5_13: + TESTQ R9, R9 + MOVQ DX, R11 + JLE LBB5_18 + TESTQ SI, SI + JLE LBB5_18 + +LBB5_15: + MOVBLZX 0(R12), BX MOVQ BX, CX - JG LBB5_17 + SHLQ $4, CX + LONG $0xba158d48; WORD $0x0099; BYTE $0x00 // leaq $39354(%rip), %rdx /* __SingleQuoteTab(%rip) */ + CMPQ 0(CX)(DX*1), $0 + JNE LBB5_22 + INCQ R12 + MOVB BX, 0(R13) + CMPQ SI, $2 + LEAQ -1(SI), SI + JL LBB5_18 + 
INCQ R13 + CMPQ R9, $1 + LEAQ -1(R9), R9 + JG LBB5_15 + +LBB5_18: + SUBQ R10, R12 + NEGQ SI + SBBQ R9, R9 + XORQ R12, R9 + JMP LBB5_36 + +LBB5_19: + MOVWLZX CX, CX + SUBQ R10, R12 + BSFL CX, R9 + ADDQ R12, R9 + JMP LBB5_35 LBB5_20: - SUBQ BX, R13 - ADDQ BX, R11 - MOVQ -72(BP), R9 + CMPL R11, $8 + JB LBB5_29 + MOVQ CX, 0(R13) + LEAQ 8(R12), BX + ADDQ $8, R13 + LEAQ -8(R11), SI + CMPQ SI, $4 + JAE LBB5_30 + JMP LBB5_31 -LBB5_21: - TESTQ R11, R11 - MOVQ -64(BP), R12 - JE LBB5_26 - NOTQ R13 - ADDQ R9, R13 - JMP LBB5_41 +LBB5_22: + SUBQ R10, R12 + MOVQ R12, R9 + JMP LBB5_36 LBB5_23: - MOVWLZX SI, CX - BSFL CX, R13 - ADDQ R15, R13 - JMP LBB5_40 + MOVQ R12, R11 + MOVQ R9, SI + CMPQ SI, $4 + JB LBB5_25 LBB5_24: - CMPL R11, $8 - JB LBB5_34 - MOVQ CX, 0(R15) - LEAQ 8(R13), R12 - ADDQ $8, R15 - LEAQ -8(R11), BX - CMPQ BX, $4 - JAE LBB5_35 - JMP LBB5_36 + MOVL 0(R11), CX + MOVL CX, 0(R13) + ADDQ $4, R11 + ADDQ $4, R13 + ADDQ $-4, SI + +LBB5_25: + CMPQ SI, $2 + JB LBB5_26 + MOVWLZX 0(R11), CX + MOVW CX, 0(R13) + ADDQ $2, R11 + ADDQ $2, R13 + ADDQ $-2, SI + TESTQ SI, SI + JNE LBB5_27 + JMP LBB5_28 LBB5_26: - SUBQ R9, R13 - JMP LBB5_41 + TESTQ SI, SI + JE LBB5_28 LBB5_27: - MOVQ -72(BP), R9 - SUBQ R9, R13 - SUBQ CX, R13 - JMP LBB5_40 + MOVB 0(R11), CX + MOVB CX, 0(R13) LBB5_28: - MOVQ R13, R11 - MOVQ R12, BX - CMPQ BX, $4 - JB LBB5_30 + SUBQ R10, R9 + ADDQ R12, R9 + NOTQ R9 + JMP LBB5_35 LBB5_29: - MOVL 0(R11), CX - MOVL CX, 0(R15) - ADDQ $4, R11 - ADDQ $4, R15 - ADDQ $-4, BX + MOVQ R12, BX + MOVQ R11, SI + CMPQ SI, $4 + JB LBB5_31 LBB5_30: - CMPQ BX, $2 - JB LBB5_31 - MOVWLZX 0(R11), CX - MOVW CX, 0(R15) - ADDQ $2, R11 - ADDQ $2, R15 - ADDQ $-2, BX - TESTQ BX, BX - JNE LBB5_32 - JMP LBB5_33 + MOVL 0(BX), CX + MOVL CX, 0(R13) + ADDQ $4, BX + ADDQ $4, R13 + ADDQ $-4, SI LBB5_31: - TESTQ BX, BX - JE LBB5_33 + CMPQ SI, $2 + JB LBB5_32 + MOVWLZX 0(BX), CX + MOVW CX, 0(R13) + ADDQ $2, BX + ADDQ $2, R13 + ADDQ $-2, SI + TESTQ SI, SI + JNE LBB5_33 + JMP LBB5_34 LBB5_32: - MOVB 0(R11), CX - MOVB CX, 0(R15) + TESTQ SI, SI + JE LBB5_34 LBB5_33: - NOTQ R12 - ADDQ R9, R12 - SUBQ R13, R12 - MOVQ R12, R13 - JMP LBB5_40 + MOVB 0(BX), CX + MOVB CX, 0(R13) LBB5_34: - MOVQ R13, R12 - MOVQ R11, BX - CMPQ BX, $4 - JB LBB5_36 + SUBQ R10, R12 + ADDQ R11, R12 + MOVQ R12, R9 LBB5_35: - MOVL 0(R12), CX - MOVL CX, 0(R15) - ADDQ $4, R12 - ADDQ $4, R15 - ADDQ $-4, BX + MOVQ DX, R11 LBB5_36: - CMPQ BX, $2 - JB LBB5_37 - MOVWLZX 0(R12), CX - MOVW CX, 0(R15) - ADDQ $2, R12 - ADDQ $2, R15 - ADDQ $-2, BX - TESTQ BX, BX - JNE LBB5_38 - JMP LBB5_39 - -LBB5_37: - TESTQ BX, BX - JE LBB5_39 - -LBB5_38: - MOVB 0(R12), CX - MOVB CX, 0(R15) + TESTQ R9, R9 + MOVQ -48(BP), R12 + MOVQ $12884901889, R13 + JS LBB5_78 + ADDQ R9, R10 + ADDQ R9, R15 + CMPQ AX, R9 + JE LBB5_74 + SUBQ R9, R14 + SUBQ AX, R9 + JMP LBB5_40 LBB5_39: - SUBQ R9, R13 - ADDQ R11, R13 + INCQ R10 + ADDQ AX, R15 + INCQ R9 + JE LBB5_74 LBB5_40: - MOVQ -64(BP), R12 - -LBB5_41: - MOVQ $12884901889, R11 - TESTQ R13, R13 - JS LBB5_83 - ADDQ R13, R14 - CMPQ AX, R13 - JE LBB5_79 - SUBQ R13, R10 - JMP LBB5_45 - -LBB5_44: - ADDQ SI, R14 - ADDQ $1, R13 - CMPQ AX, R13 - JE LBB5_79 - -LBB5_45: - MOVBLZX 0(R9)(R13*1), CX + MOVBLZX 0(R10), CX SHLQ $4, CX - MOVQ 0(R12)(CX*1), DX - TESTL DX, DX - JE LBB5_53 - MOVLQSX DX, SI - SUBQ SI, R10 - JL LBB5_81 - SHLQ $32, DX - LEAQ 0(R12)(CX*1), BX - ADDQ $8, BX - CMPQ DX, R11 - JL LBB5_49 - MOVL 0(BX), DX - MOVL DX, 0(R14) - LEAQ 0(R12)(CX*1), BX - ADDQ $12, BX - LEAQ 4(R14), R8 - LEAQ -4(SI), CX + MOVQ 0(R8)(CX*1), BX + TESTL BX, BX + JE LBB5_48 + 
MOVLQSX BX, AX + SUBQ AX, R14 + JL LBB5_75 + SHLQ $32, BX + LEAQ 8(R8)(CX*1), SI + CMPQ BX, R13 + JL LBB5_44 + MOVL 0(SI), SI + MOVL SI, 0(R15) + LEAQ 12(R8)(CX*1), SI + LEAQ 4(R15), BX + LEAQ -4(AX), CX CMPQ CX, $2 - JGE LBB5_50 - JMP LBB5_51 + JGE LBB5_45 + JMP LBB5_46 -LBB5_49: - MOVQ R14, R8 - MOVQ SI, CX +LBB5_44: + MOVQ R15, BX + MOVQ AX, CX CMPQ CX, $2 - JL LBB5_51 + JL LBB5_46 -LBB5_50: - MOVWLZX 0(BX), DX - MOVW DX, 0(R8) +LBB5_45: + MOVWLZX 0(SI), DX + MOVW DX, 0(BX) + ADDQ $2, SI ADDQ $2, BX - ADDQ $2, R8 ADDQ $-2, CX -LBB5_51: +LBB5_46: TESTQ CX, CX - JLE LBB5_44 - MOVBLZX 0(BX), CX - MOVB CX, 0(R8) - JMP LBB5_44 - -LBB5_53: - LEAQ 0(R9)(R13*1), CX - SUBQ R13, AX - JNE LBB5_3 - -LBB5_79: - ADDQ R13, R9 - MOVQ -56(BP), R11 - MOVQ -48(BP), R15 - -LBB5_80: - SUBQ R15, R14 - MOVQ R14, 0(R11) - SUBQ DI, R9 - MOVQ R9, AX - JMP LBB5_82 - -LBB5_56: - LONG $0x0b0d8d4c; WORD $0x00b8; BYTE $0x00 // leaq $47115(%rip), %r9 /* __EscTab(%rip) */ - QUAD $0xfffffc07056ffac5 // vmovdqu $-1017(%rip), %xmm0 /* LCPI5_0(%rip) */ - QUAD $0xfffffc0f0d6ffac5 // vmovdqu $-1009(%rip), %xmm1 /* LCPI5_1(%rip) */ - QUAD $0xfffffc17156ffac5 // vmovdqu $-1001(%rip), %xmm2 /* LCPI5_2(%rip) */ + JLE LBB5_39 + MOVBLZX 0(SI), CX + MOVB CX, 0(BX) + JMP LBB5_39 + +LBB5_48: + MOVQ R9, AX + NEGQ AX + TESTQ R9, R9 + JNE LBB5_3 + +LBB5_74: + SUBQ R12, R15 + MOVQ R15, 0(R11) + SUBQ DI, R10 + JMP LBB5_76 + +LBB5_51: + LONG $0xcc0d8d4c; WORD $0x00b7; BYTE $0x00 // leaq $47052(%rip), %r9 /* __EscTab(%rip) */ + QUAD $0xfffffc4c056ffac5 // vmovdqu $-948(%rip), %xmm0 /* LCPI5_0(%rip) */ + QUAD $0xfffffc540d6ffac5 // vmovdqu $-940(%rip), %xmm1 /* LCPI5_1(%rip) */ + QUAD $0xfffffc5c156ffac5 // vmovdqu $-932(%rip), %xmm2 /* LCPI5_2(%rip) */ LONG $0xdb76e1c5 // vpcmpeqd %xmm3, %xmm3, %xmm3 - MOVQ R15, BX + MOVQ R12, BX MOVQ AX, R10 -LBB5_57: +LBB5_52: CMPQ R10, $16 - JL LBB5_62 + JL LBB5_57 MOVL $16, CX XORL SI, SI -LBB5_59: +LBB5_54: LONG $0x246ffac5; BYTE $0x37 // vmovdqu (%rdi,%rsi), %xmm4 LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 @@ -1854,24 +1829,23 @@ LBB5_59: LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 LONG $0x247ffac5; BYTE $0x33 // vmovdqu %xmm4, (%rbx,%rsi) LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 - LONG $0xe4dbd1c5 // vpand %xmm4, %xmm5, %xmm4 + LONG $0xe5dbd9c5 // vpand %xmm5, %xmm4, %xmm4 LONG $0xe4ebc9c5 // vpor %xmm4, %xmm6, %xmm4 LONG $0xd4d7f9c5 // vpmovmskb %xmm4, %edx TESTW DX, DX - JNE LBB5_72 + JNE LBB5_67 ADDQ $16, SI - LEAQ 0(R10)(CX*1), DX - ADDQ $-16, DX + LEAQ -16(R10)(CX*1), DX ADDQ $-16, CX CMPQ DX, $31 - JG LBB5_59 + JG LBB5_54 ADDQ SI, DI SUBQ SI, R10 ADDQ SI, BX -LBB5_62: +LBB5_57: CMPQ R10, $8 - JL LBB5_66 + JL LBB5_61 MOVBLZX 0(DI), CX MOVBLZX 0(CX)(R9*1), CX MOVBLZX 1(DI), DX @@ -1889,7 +1863,7 @@ LBB5_62: MOVQ 0(DI), DX MOVQ DX, 0(BX) TESTB CX, CX - JNE LBB5_76 + JNE LBB5_71 MOVBLZX 4(DI), CX MOVBLZX 0(CX)(R9*1), CX MOVBLZX 5(DI), DX @@ -1905,14 +1879,14 @@ LBB5_62: ORL SI, CX ORL DX, CX TESTB CX, CX - JNE LBB5_77 + JNE LBB5_72 ADDQ $8, BX ADDQ $8, DI ADDQ $-8, R10 -LBB5_66: +LBB5_61: CMPQ R10, $4 - JL LBB5_69 + JL LBB5_64 MOVBLZX 0(DI), CX MOVBLZX 0(CX)(R9*1), CX MOVBLZX 1(DI), DX @@ -1930,29 +1904,28 @@ LBB5_66: MOVL 0(DI), DX MOVL DX, 0(BX) TESTB CX, CX - JNE LBB5_76 + JNE LBB5_71 ADDQ $4, BX ADDQ $4, DI ADDQ $-4, R10 -LBB5_69: +LBB5_64: TESTQ R10, R10 - JLE LBB5_78 + JLE LBB5_73 -LBB5_70: +LBB5_65: MOVBLZX 0(DI), CX CMPB 0(CX)(R9*1), $0 - JNE LBB5_73 - ADDQ $1, DI + JNE LBB5_68 + INCQ DI MOVB CX, 0(BX) - ADDQ $1, BX - LEAQ 
-1(R10), CX + INCQ BX CMPQ R10, $1 - MOVQ CX, R10 - JG LBB5_70 - JMP LBB5_78 + LEAQ -1(R10), R10 + JG LBB5_65 + JMP LBB5_73 -LBB5_72: +LBB5_67: MOVWLZX DX, CX BSFL CX, CX ADDQ CX, DI @@ -1962,59 +1935,57 @@ LBB5_72: ADDQ CX, BX ADDQ SI, BX -LBB5_73: +LBB5_68: MOVB 0(DI), CX -LBB5_74: +LBB5_69: MOVQ BX, DX MOVBLZX CX, CX SHLQ $4, CX - MOVLQSX 0(R12)(CX*1), BX - MOVQ 8(R12)(CX*1), CX + MOVLQSX 0(R8)(CX*1), BX + MOVQ 8(R8)(CX*1), CX MOVQ CX, 0(DX) ADDQ DX, BX CMPQ R10, $2 - JL LBB5_78 - ADDQ $-1, R10 + JL LBB5_73 + DECQ R10 MOVBLZX 1(DI), CX - ADDQ $1, DI + INCQ DI CMPB 0(CX)(R9*1), $0 - JNE LBB5_74 - JMP LBB5_57 + JNE LBB5_69 + JMP LBB5_52 -LBB5_76: +LBB5_71: BSFL CX, CX ADDQ CX, DI SUBQ CX, R10 ADDQ CX, BX - JMP LBB5_73 + JMP LBB5_68 -LBB5_77: +LBB5_72: BSFL CX, CX LEAQ 4(CX), DX - ADDQ CX, DI - ADDQ $4, DI + LEAQ 4(DI)(CX*1), DI SUBQ DX, R10 - ADDQ CX, BX - ADDQ $4, BX - JMP LBB5_73 + LEAQ 4(BX)(CX*1), BX + JMP LBB5_68 -LBB5_78: - SUBQ R15, BX +LBB5_73: + SUBQ R12, BX MOVQ BX, 0(R11) - JMP LBB5_82 + JMP LBB5_77 -LBB5_81: - SUBQ -48(BP), R14 - MOVQ -56(BP), AX - MOVQ R14, 0(AX) - SUBQ R9, DI - NOTQ R13 - ADDQ DI, R13 - MOVQ R13, AX +LBB5_75: + SUBQ R12, R15 + MOVQ R15, 0(R11) + NOTQ R10 + ADDQ DI, R10 + +LBB5_76: + MOVQ R10, AX -LBB5_82: - ADDQ $32, SP +LBB5_77: + ADDQ $8, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -2023,17 +1994,15 @@ LBB5_82: BYTE $0x5d // popq %rbp RET -LBB5_83: - SUBQ -48(BP), R14 - MOVQ R13, AX - NOTQ AX - ADDQ R14, AX - MOVQ -56(BP), CX - MOVQ AX, 0(CX) - SUBQ R9, DI - ADDQ R13, DI - MOVQ DI, AX - JMP LBB5_82 +LBB5_78: + SUBQ R12, R15 + NOTQ R9 + ADDQ R9, R15 + MOVQ R15, 0(R11) + SUBQ DI, R10 + ADDQ R9, R10 + NOTQ R10 + JMP LBB5_76 LCPI6_0: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' @@ -2046,326 +2015,308 @@ _unquote: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $32, SP + SUBQ $40, SP TESTQ SI, SI - JE LBB6_1 - MOVQ CX, -48(BP) + JE LBB6_82 + MOVQ SI, R11 + MOVQ CX, -56(BP) + MOVQ R8, AX + MOVQ R8, -72(BP) MOVL R8, R10 ANDL $1, R10 - LONG $0x7e0d8d48; WORD $0x00b6; BYTE $0x00 // leaq $46718(%rip), %rcx /* __UnquoteTab(%rip) */ - QUAD $0xffffffbc056ffac5 // vmovdqu $-68(%rip), %xmm0 /* LCPI6_0(%rip) */ + LONG $0x53058d4c; WORD $0x00b6; BYTE $0x00 // leaq $46675(%rip), %r8 /* __UnquoteTab(%rip) */ + QUAD $0xffffffb2056ffac5 // vmovdqu $-78(%rip), %xmm0 /* LCPI6_0(%rip) */ MOVQ DI, R9 MOVQ SI, R13 MOVQ DX, AX -LBB6_3: +LBB6_2: CMPB 0(R9), $92 - JNE LBB6_5 - XORL R14, R14 - JMP LBB6_17 + JNE LBB6_4 + XORL SI, SI + JMP LBB6_13 -LBB6_5: +LBB6_4: MOVQ R13, R15 - MOVQ AX, R12 + MOVQ AX, SI MOVQ R9, R14 CMPQ R13, $16 - JL LBB6_11 - XORL R12, R12 - MOVQ R13, R14 + JL LBB6_7 -LBB6_7: - LONG $0x6f7a81c4; WORD $0x210c // vmovdqu (%r9,%r12), %xmm1 - LONG $0x7f7aa1c4; WORD $0x200c // vmovdqu %xmm1, (%rax,%r12) - LONG $0xc874f1c5 // vpcmpeqb %xmm0, %xmm1, %xmm1 - LONG $0xd9d7f9c5 // vpmovmskb %xmm1, %ebx +LBB6_5: + LONG $0x6f7ac1c4; BYTE $0x0e // vmovdqu (%r14), %xmm1 + LONG $0x0e7ffac5 // vmovdqu %xmm1, (%rsi) + LONG $0xc874f1c5 // vpcmpeqb %xmm0, %xmm1, %xmm1 + LONG $0xd9d7f9c5 // vpmovmskb %xmm1, %ebx TESTW BX, BX - JNE LBB6_8 - LEAQ -16(R14), R15 - ADDQ $16, R12 - CMPQ R14, $31 - MOVQ R15, R14 - JG LBB6_7 - LEAQ 0(R9)(R12*1), R14 - ADDQ AX, R12 + JNE LBB6_12 + ADDQ $16, R14 + ADDQ $16, SI + CMPQ R15, $31 + LEAQ -16(R15), R15 + JG LBB6_5 -LBB6_11: +LBB6_7: TESTQ R15, R15 - JE LBB6_115 + JE LBB6_83 XORL BX, BX -LBB6_13: - MOVBLZX 0(R14)(BX*1), R11 
- CMPB R11, $92 - JE LBB6_15 - MOVB R11, 0(R12)(BX*1) - ADDQ $1, BX +LBB6_9: + MOVBLZX 0(R14)(BX*1), CX + CMPB CX, $92 + JE LBB6_11 + MOVB CX, 0(SI)(BX*1) + INCQ BX CMPQ R15, BX - JNE LBB6_13 - JMP LBB6_115 + JNE LBB6_9 + JMP LBB6_83 -LBB6_15: - SUBQ R9, R14 +LBB6_11: ADDQ BX, R14 - CMPQ R14, $-1 - JNE LBB6_17 - JMP LBB6_115 + SUBQ R9, R14 + MOVQ R14, SI + CMPQ SI, $-1 + JNE LBB6_13 + JMP LBB6_83 -LBB6_8: - MOVWLZX BX, BX - BSFQ BX, R14 - ADDQ R12, R14 - CMPQ R14, $-1 - JE LBB6_115 - -LBB6_17: - LEAQ 2(R14), BX - SUBQ BX, R13 - JS LBB6_18 - ADDQ R14, R9 - ADDQ $2, R9 +LBB6_12: + MOVWLZX BX, CX + SUBQ R9, R14 + BSFQ CX, SI + ADDQ R14, SI + CMPQ SI, $-1 + JE LBB6_83 + +LBB6_13: + LEAQ 2(SI), CX + SUBQ CX, R13 + JS LBB6_94 + LEAQ 2(R9)(SI*1), R9 TESTQ R10, R10 - JNE LBB6_20 - -LBB6_31: - ADDQ R14, AX - MOVBLZX -1(R9), BX - MOVB 0(BX)(CX*1), BX - CMPB BX, $-1 - JE LBB6_35 - TESTB BX, BX - JE LBB6_33 - MOVB BX, 0(AX) - ADDQ $1, AX - TESTQ R13, R13 - JNE LBB6_3 - JMP LBB6_114 - -LBB6_35: + JNE LBB6_58 + +LBB6_15: + ADDQ SI, AX + MOVBLZX -1(R9), CX + MOVB 0(CX)(R8*1), CX + CMPB CX, $-1 + JE LBB6_18 + TESTB CX, CX + JE LBB6_95 + MOVB CX, 0(AX) + INCQ AX + JMP LBB6_57 + +LBB6_18: CMPQ R13, $3 - JLE LBB6_18 - MOVL 0(R9), R14 - MOVL R14, R15 - NOTL R15 - LEAL -808464432(R14), BX - ANDL $-2139062144, R15 - TESTL BX, R15 - JNE LBB6_40 - LEAL 421075225(R14), BX - ORL R14, BX + JLE LBB6_94 + MOVL 0(R9), SI + MOVL SI, CX + NOTL CX + LEAL -808464432(SI), BX + ANDL $-2139062144, CX + TESTL BX, CX + JNE LBB6_85 + LEAL 421075225(SI), BX + ORL SI, BX TESTL $-2139062144, BX - JNE LBB6_40 - MOVQ CX, R12 - MOVL R14, BX + JNE LBB6_85 + MOVL SI, BX ANDL $2139062143, BX - MOVL $-1061109568, CX - SUBL BX, CX - LEAL 1179010630(BX), R11 - ANDL R15, CX - TESTL R11, CX - JNE LBB6_40 - MOVL $-522133280, CX - SUBL BX, CX + MOVL $-1061109568, R14 + SUBL BX, R14 + LEAL 1179010630(BX), R15 + ANDL CX, R14 + TESTL R15, R14 + JNE LBB6_85 + MOVL $-522133280, R14 + SUBL BX, R14 ADDL $960051513, BX - ANDL CX, R15 - TESTL BX, R15 - JNE LBB6_40 - BSWAPL R14 - MOVL R14, CX + ANDL R14, CX + TESTL BX, CX + JNE LBB6_85 + BSWAPL SI + MOVL SI, CX SHRL $4, CX NOTL CX ANDL $16843009, CX LEAL 0(CX)(CX*8), CX - ANDL $252645135, R14 - ADDL CX, R14 - MOVL R14, CX + ANDL $252645135, SI + ADDL CX, SI + MOVL SI, CX SHRL $4, CX - ORL R14, CX + ORL SI, CX MOVBLZX CX, R15 SHRL $8, CX ANDL $65280, CX ORL CX, R15 LEAQ -4(R13), R14 CMPL R15, $128 - JB LBB6_50 - XORL R11, R11 + JB LBB6_66 + XORL R12, R12 TESTQ R10, R10 - JNE LBB6_52 + JE LBB6_40 -LBB6_54: +LBB6_25: CMPL R15, $2048 - JB LBB6_53 + JB LBB6_68 MOVL R15, CX ANDL $-2048, CX CMPL CX, $55296 - JNE LBB6_69 + JNE LBB6_54 + TESTQ R14, R14 + JLE LBB6_72 + CMPB 4(R9)(R12*1), $92 + JNE LBB6_73 CMPL R15, $56319 - JA LBB6_84 - CMPQ R14, $6 - JL LBB6_84 - CMPB 4(R9)(R11*1), $92 - JNE LBB6_84 - CMPB 5(R9)(R11*1), $117 - JNE LBB6_84 - MOVL 6(R9)(R11*1), CX - MOVL CX, BX - NOTL BX - MOVQ CX, -64(BP) - ADDL $-808464432, CX - ANDL $-2139062144, BX - MOVL BX, -68(BP) - TESTL CX, BX - JNE LBB6_117 - MOVQ -64(BP), BX - LEAL 421075225(BX), CX - ORL BX, CX - TESTL $-2139062144, CX - JNE LBB6_117 - MOVQ -64(BP), BX + JA LBB6_70 + CMPQ R14, $7 + JL LBB6_70 + CMPB 5(R9)(R12*1), $92 + JNE LBB6_70 + CMPB 6(R9)(R12*1), $117 + JNE LBB6_70 + MOVL 7(R9)(R12*1), SI + MOVL SI, CX + NOTL CX + LEAL -808464432(SI), BX + ANDL $-2139062144, CX + TESTL BX, CX + JNE LBB6_99 + LEAL 421075225(SI), BX + ORL SI, BX + TESTL $-2139062144, BX + JNE LBB6_99 + MOVL SI, BX ANDL $2139062143, BX - MOVL $-1061109568, CX - SUBL BX, CX - MOVL CX, 
-72(BP) - LEAL 1179010630(BX), CX - MOVL CX, -52(BP) - MOVL -72(BP), CX - ANDL -68(BP), CX - TESTL CX, -52(BP) - JNE LBB6_117 - MOVL $-522133280, CX - SUBL BX, CX - MOVL CX, -52(BP) + MOVL $-1061109568, -44(BP) + SUBL BX, -44(BP) + MOVQ SI, -80(BP) + LEAL 1179010630(BX), SI + MOVL SI, -60(BP) + ANDL CX, -44(BP) + MOVL -60(BP), SI + TESTL SI, -44(BP) + MOVQ -80(BP), SI + JNE LBB6_99 + MOVL $-522133280, -44(BP) + SUBL BX, -44(BP) ADDL $960051513, BX - MOVL -68(BP), CX - ANDL -52(BP), CX + ANDL -44(BP), CX TESTL BX, CX - JNE LBB6_117 - MOVQ -64(BP), BX - BSWAPL BX - MOVL BX, CX + JNE LBB6_99 + BSWAPL SI + MOVL SI, CX SHRL $4, CX NOTL CX ANDL $16843009, CX LEAL 0(CX)(CX*8), CX - ANDL $252645135, BX - ADDL CX, BX - MOVL BX, CX + ANDL $252645135, SI + ADDL CX, SI + MOVL SI, CX SHRL $4, CX - ORL BX, CX - MOVL CX, BX - SHRL $8, BX - ANDL $65280, BX - MOVL BX, -64(BP) + ORL SI, CX + MOVL CX, SI + SHRL $8, SI + ANDL $65280, SI MOVBLZX CX, BX - ADDL -64(BP), BX + ORL SI, BX ANDL $16515072, CX CMPL CX, $14417920 - JE LBB6_109 - TESTB $2, R8 - JE LBB6_118 - ADDQ $-6, R14 + JE LBB6_77 + TESTB $2, -72(BP) + JE LBB6_114 + ADDQ $-7, R14 MOVW $-16401, 0(AX) MOVB $-67, 2(AX) ADDQ $3, AX - ADDQ $6, R11 + ADDQ $7, R12 MOVL BX, R15 CMPL BX, $127 - JA LBB6_54 - JMP LBB6_67 + JA LBB6_25 + JMP LBB6_53 -LBB6_52: +LBB6_40: CMPL R15, $2048 - JB LBB6_53 + JB LBB6_68 MOVL R15, CX ANDL $-2048, CX CMPL CX, $55296 - JNE LBB6_69 - TESTQ R14, R14 - JLE LBB6_72 - CMPB 4(R9)(R11*1), $92 - JNE LBB6_80 + JNE LBB6_54 CMPL R15, $56319 - JA LBB6_79 - CMPQ R14, $7 - JL LBB6_79 - CMPB 5(R9)(R11*1), $92 - JNE LBB6_79 - CMPB 6(R9)(R11*1), $117 - JNE LBB6_79 - MOVL 7(R9)(R11*1), CX - MOVL CX, BX - NOTL BX - MOVQ CX, -64(BP) - ADDL $-808464432, CX - ANDL $-2139062144, BX - MOVL BX, -68(BP) - TESTL CX, BX - JNE LBB6_94 - MOVQ -64(BP), BX - LEAL 421075225(BX), CX - ORL BX, CX - TESTL $-2139062144, CX - JNE LBB6_94 - MOVQ -64(BP), BX + JA LBB6_69 + CMPQ R14, $6 + JL LBB6_69 + CMPB 4(R9)(R12*1), $92 + JNE LBB6_69 + CMPB 5(R9)(R12*1), $117 + JNE LBB6_69 + MOVL 6(R9)(R12*1), SI + MOVL SI, CX + NOTL CX + LEAL -808464432(SI), BX + ANDL $-2139062144, CX + TESTL BX, CX + JNE LBB6_98 + LEAL 421075225(SI), BX + ORL SI, BX + TESTL $-2139062144, BX + JNE LBB6_98 + MOVL SI, BX ANDL $2139062143, BX - MOVL $-1061109568, CX - SUBL BX, CX - MOVL CX, -72(BP) - LEAL 1179010630(BX), CX - MOVL CX, -52(BP) - MOVL -72(BP), CX - ANDL -68(BP), CX - TESTL CX, -52(BP) - JNE LBB6_94 - MOVL $-522133280, CX - SUBL BX, CX - MOVL CX, -52(BP) + MOVL $-1061109568, -44(BP) + SUBL BX, -44(BP) + MOVQ SI, -80(BP) + LEAL 1179010630(BX), SI + MOVL SI, -60(BP) + ANDL CX, -44(BP) + MOVL -60(BP), SI + TESTL SI, -44(BP) + MOVQ -80(BP), SI + JNE LBB6_98 + MOVL $-522133280, -44(BP) + SUBL BX, -44(BP) ADDL $960051513, BX - MOVL -68(BP), CX - ANDL -52(BP), CX + ANDL -44(BP), CX TESTL BX, CX - JNE LBB6_94 - MOVQ -64(BP), BX - BSWAPL BX - MOVL BX, CX + JNE LBB6_98 + BSWAPL SI + MOVL SI, CX SHRL $4, CX NOTL CX ANDL $16843009, CX LEAL 0(CX)(CX*8), CX - ANDL $252645135, BX - ADDL CX, BX - MOVL BX, CX + ANDL $252645135, SI + ADDL CX, SI + MOVL SI, CX SHRL $4, CX - ORL BX, CX - MOVL CX, BX - SHRL $8, BX - ANDL $65280, BX - MOVL BX, -64(BP) + ORL SI, CX + MOVL CX, SI + SHRL $8, SI + ANDL $65280, SI MOVBLZX CX, BX - ADDL -64(BP), BX + ORL SI, BX ANDL $16515072, CX CMPL CX, $14417920 - JE LBB6_110 - TESTB $2, R8 - JE LBB6_106 - ADDQ $-7, R14 + JE LBB6_76 + TESTB $2, -72(BP) + JE LBB6_113 + ADDQ $-6, R14 MOVW $-16401, 0(AX) MOVB $-67, 2(AX) ADDQ $3, AX - ADDQ $7, R11 - MOVQ BX, CX - MOVL CX, R15 
- CMPL CX, $128 - JAE LBB6_52 + ADDQ $6, R12 + MOVL BX, R15 + CMPL BX, $128 + JAE LBB6_40 -LBB6_67: - ADDQ R11, R9 - ADDQ $4, R9 +LBB6_53: + LEAQ 4(R9)(R12*1), R9 MOVL BX, R15 - JMP LBB6_68 + JMP LBB6_67 -LBB6_69: - ADDQ R11, R9 - ADDQ $4, R9 +LBB6_54: + LEAQ 4(R9)(R12*1), R9 MOVL R15, CX SHRL $12, CX ORB $-32, CX @@ -2379,146 +2330,147 @@ LBB6_69: ORB $-128, R15 MOVB R15, 2(AX) -LBB6_87: - ADDQ $3, AX - MOVQ R14, R13 - MOVQ R12, CX +LBB6_55: + ADDQ $3, AX + +LBB6_56: + MOVQ R14, R13 + +LBB6_57: TESTQ R13, R13 - JNE LBB6_3 - JMP LBB6_114 + JNE LBB6_2 + JMP LBB6_96 -LBB6_20: +LBB6_58: TESTL R13, R13 - JE LBB6_18 + JE LBB6_94 CMPB -1(R9), $92 - JNE LBB6_22 + JNE LBB6_97 CMPB 0(R9), $92 - JNE LBB6_30 + JNE LBB6_65 CMPL R13, $1 - JLE LBB6_18 - MOVB 1(R9), BX - CMPB BX, $34 - JE LBB6_29 - CMPB BX, $92 - JNE LBB6_28 - -LBB6_29: - ADDQ $1, R9 - ADDQ $-1, R13 - -LBB6_30: - ADDQ $1, R9 - ADDQ $-1, R13 - JMP LBB6_31 - -LBB6_50: + JLE LBB6_94 + MOVB 1(R9), CX + CMPB CX, $34 + JE LBB6_64 + CMPB CX, $92 + JNE LBB6_109 + +LBB6_64: + INCQ R9 + DECQ R13 + +LBB6_65: + INCQ R9 + DECQ R13 + JMP LBB6_15 + +LBB6_66: ADDQ $4, R9 -LBB6_68: - MOVQ R12, CX - MOVB R15, 0(AX) - ADDQ $1, AX - MOVQ R14, R13 - TESTQ R13, R13 - JNE LBB6_3 - JMP LBB6_114 +LBB6_67: + MOVB R15, 0(AX) + INCQ AX + JMP LBB6_56 -LBB6_53: - ADDQ R11, R9 - ADDQ $4, R9 - MOVL R15, CX - SHRL $6, CX - ORB $-64, CX - MOVB CX, 0(AX) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 1(AX) - ADDQ $2, AX - MOVQ R14, R13 - MOVQ R12, CX - TESTQ R13, R13 - JNE LBB6_3 - JMP LBB6_114 +LBB6_68: + LEAQ 4(R9)(R12*1), R9 + MOVL R15, CX + SHRL $6, CX + ORB $-64, CX + MOVB CX, 0(AX) + ANDB $63, R15 + ORB $-128, R15 + MOVB R15, 1(AX) + ADDQ $2, AX + JMP LBB6_56 -LBB6_84: - ADDQ R11, R9 - ADDQ $4, R9 - JMP LBB6_85 +LBB6_69: + LEAQ 4(R9)(R12*1), R9 + JMP LBB6_71 -LBB6_79: - ADDQ R11, R9 - ADDQ $5, R9 - SUBQ R11, R13 +LBB6_70: + LEAQ 5(R9)(R12*1), R9 + SUBQ R12, R13 ADDQ $-5, R13 MOVQ R13, R14 -LBB6_85: - TESTB $2, R8 - JE LBB6_89 +LBB6_71: + TESTB $2, -72(BP) + JNE LBB6_75 + JMP LBB6_116 -LBB6_86: - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - JMP LBB6_87 +LBB6_72: + TESTB $2, -72(BP) + JNE LBB6_74 + JMP LBB6_94 -LBB6_80: - TESTB $2, R8 - JE LBB6_82 - ADDQ R11, R9 - ADDQ $4, R9 - JMP LBB6_86 +LBB6_73: + TESTB $2, -72(BP) + JE LBB6_117 -LBB6_109: - ADDQ R11, R9 - ADDQ $10, R9 - SUBQ R11, R13 +LBB6_74: + LEAQ 4(R9)(R12*1), R9 + +LBB6_75: + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + JMP LBB6_55 + +LBB6_76: + LEAQ 10(R9)(R12*1), R9 + SUBQ R12, R13 ADDQ $-10, R13 - JMP LBB6_111 + JMP LBB6_78 -LBB6_110: - ADDQ R11, R9 - ADDQ $11, R9 - SUBQ R11, R13 +LBB6_77: + LEAQ 11(R9)(R12*1), R9 + SUBQ R12, R13 ADDQ $-11, R13 -LBB6_111: +LBB6_78: SHLL $10, R15 - MOVL BX, R14 - ADDL R15, R14 - LEAL 0(BX)(R15*1), R11 - ADDL $-56613888, R11 - MOVL R11, BX - SHRL $18, BX - ORB $-16, BX - MOVB BX, 0(AX) - MOVL R11, CX - SHRL $12, CX - ANDB $63, CX - ORB $-128, CX - MOVB CX, 1(AX) - SHRL $6, R11 - ANDB $63, R11 - ORB $-128, R11 - MOVB R11, 2(AX) - ANDB $63, R14 - ORB $-128, R14 - MOVB R14, 3(AX) - ADDQ $4, AX - MOVQ R12, CX - TESTQ R13, R13 - JNE LBB6_3 - JMP LBB6_114 + LEAL -56613888(R15)(BX*1), CX + CMPL CX, $1114112 + JB LBB6_81 + TESTB $2, -72(BP) + JE LBB6_111 + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + ADDQ $3, AX + JMP LBB6_57 + +LBB6_81: + MOVL CX, SI + SHRL $18, SI + ORB $-16, SI + MOVB SI, 0(AX) + MOVL CX, SI + SHRL $12, SI + ANDB $63, SI + ORB $-128, SI + MOVB SI, 1(AX) + MOVL CX, SI + SHRL $6, SI + ANDB $63, SI + ORB $-128, SI + MOVB SI, 2(AX) + ANDB $63, CX + ORB $-128, CX + MOVB CX, 3(AX) 
+ ADDQ $4, AX + JMP LBB6_57 -LBB6_1: +LBB6_82: XORL R13, R13 MOVQ DX, AX -LBB6_115: +LBB6_83: ADDQ R13, AX SUBQ DX, AX -LBB6_116: - ADDQ $32, SP +LBB6_84: + ADDQ $40, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -2527,204 +2479,197 @@ LBB6_116: BYTE $0x5d // popq %rbp RET -LBB6_40: +LBB6_85: MOVQ R9, DX SUBQ DI, DX - MOVQ -48(BP), DI + MOVQ -56(BP), DI MOVQ DX, 0(DI) - MOVB 0(R9), SI - LEAL -48(SI), CX + MOVB 0(R9), CX + LEAL -48(CX), SI MOVQ $-2, AX - CMPB CX, $10 - JB LBB6_42 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_116 + CMPB SI, $10 + JB LBB6_87 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 -LBB6_42: +LBB6_87: LEAQ 1(DX), CX MOVQ CX, 0(DI) - MOVB 1(R9), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_44 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_116 + MOVB 1(R9), CX + LEAL -48(CX), SI + CMPB SI, $10 + JB LBB6_89 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 -LBB6_44: +LBB6_89: LEAQ 2(DX), CX MOVQ CX, 0(DI) - MOVB 2(R9), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_46 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_116 - -LBB6_46: + MOVB 2(R9), CX + LEAL -48(CX), SI + CMPB SI, $10 + JB LBB6_91 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 + +LBB6_91: LEAQ 3(DX), CX MOVQ CX, 0(DI) - MOVB 3(R9), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_48 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_116 - -LBB6_48: + MOVB 3(R9), CX + LEAL -48(CX), SI + CMPB SI, $10 + JB LBB6_93 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 + +LBB6_93: ADDQ $4, DX MOVQ DX, 0(DI) - JMP LBB6_116 + JMP LBB6_84 -LBB6_33: +LBB6_94: + MOVQ -56(BP), AX + MOVQ R11, 0(AX) + MOVQ $-1, AX + JMP LBB6_84 + +LBB6_95: NOTQ DI ADDQ DI, R9 - MOVQ -48(BP), AX + MOVQ -56(BP), AX MOVQ R9, 0(AX) MOVQ $-3, AX - JMP LBB6_116 + JMP LBB6_84 + +LBB6_96: + XORL R13, R13 + JMP LBB6_83 -LBB6_22: +LBB6_97: NOTQ DI ADDQ DI, R9 + JMP LBB6_110 -LBB6_23: - MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-2, AX - JMP LBB6_116 - -LBB6_117: - LEAQ 0(R9)(R11*1), SI - ADDQ $4, SI - JMP LBB6_95 +LBB6_98: + LEAQ 4(R9)(R12*1), SI + JMP LBB6_100 -LBB6_94: - LEAQ 0(R9)(R11*1), SI - ADDQ $5, SI +LBB6_99: + LEAQ 5(R9)(R12*1), SI -LBB6_95: +LBB6_100: MOVQ SI, DX SUBQ DI, DX ADDQ $2, DX - MOVQ -48(BP), AX + MOVQ -56(BP), AX MOVQ DX, 0(AX) - MOVB 2(SI), DI - LEAL -48(DI), CX + MOVB 2(SI), CX + LEAL -48(CX), DI MOVQ $-2, AX - CMPB CX, $10 - JB LBB6_97 - ANDB $-33, DI - ADDB $-65, DI - CMPB DI, $5 - JA LBB6_116 + CMPB DI, $10 + JB LBB6_102 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 -LBB6_97: +LBB6_102: LEAQ 1(DX), CX - MOVQ -48(BP), DI + MOVQ -56(BP), DI MOVQ CX, 0(DI) - MOVB 3(SI), DI - LEAL -48(DI), CX - CMPB CX, $10 - JB LBB6_99 - ANDB $-33, DI - ADDB $-65, DI - CMPB DI, $5 - JA LBB6_116 + MOVB 3(SI), CX + LEAL -48(CX), DI + CMPB DI, $10 + JB LBB6_104 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 -LBB6_99: +LBB6_104: LEAQ 2(DX), CX - MOVQ -48(BP), DI + MOVQ -56(BP), DI MOVQ CX, 0(DI) - MOVB 4(SI), DI - LEAL -48(DI), CX - CMPB CX, $10 - JB LBB6_101 - ANDB $-33, DI - ADDB $-65, DI - CMPB DI, $5 - JA LBB6_116 + MOVB 4(SI), CX + LEAL -48(CX), DI + CMPB DI, $10 + JB LBB6_106 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 -LBB6_101: +LBB6_106: LEAQ 3(DX), CX - MOVQ -48(BP), DI + MOVQ -56(BP), DI MOVQ CX, 0(DI) - MOVB 5(SI), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_103 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_116 - -LBB6_103: + MOVB 5(SI), CX + LEAL -48(CX), SI + CMPB SI, $10 + JB 
LBB6_108 + ANDB $-33, CX + ADDB $-65, CX + CMPB CX, $5 + JA LBB6_84 + +LBB6_108: ADDQ $4, DX - MOVQ -48(BP), CX + MOVQ -56(BP), CX MOVQ DX, 0(CX) - JMP LBB6_116 + JMP LBB6_84 -LBB6_28: +LBB6_109: SUBQ DI, R9 - ADDQ $1, R9 - JMP LBB6_23 + INCQ R9 -LBB6_72: - TESTB $2, R8 - JE LBB6_18 - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - ADDQ $3, AX +LBB6_110: + MOVQ -56(BP), AX + MOVQ R9, 0(AX) + MOVQ $-2, AX + JMP LBB6_84 -LBB6_114: - XORL R13, R13 - JMP LBB6_115 +LBB6_111: + SUBQ DI, R9 + ADDQ $-4, R9 -LBB6_18: - MOVQ -48(BP), AX - MOVQ SI, 0(AX) - MOVQ $-1, AX - JMP LBB6_116 +LBB6_112: + MOVQ -56(BP), AX + MOVQ R9, 0(AX) + MOVQ $-4, AX + JMP LBB6_84 -LBB6_118: - LEAQ 0(R9)(R11*1), AX - ADDQ $10, AX - JMP LBB6_107 +LBB6_113: + LEAQ 10(R9)(R12*1), AX + JMP LBB6_115 -LBB6_106: - LEAQ 0(R9)(R11*1), AX - ADDQ $11, AX +LBB6_114: + LEAQ 11(R9)(R12*1), AX -LBB6_107: +LBB6_115: SUBQ DI, AX ADDQ $-4, AX - MOVQ -48(BP), CX + MOVQ -56(BP), CX MOVQ AX, 0(CX) MOVQ $-4, AX - JMP LBB6_116 + JMP LBB6_84 -LBB6_89: - ADDQ R10, DI - SUBQ DI, R9 - ADDQ $-4, R9 - -LBB6_83: - MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-4, AX - JMP LBB6_116 +LBB6_116: + LEAQ 4(R10)(DI*1), AX + SUBQ AX, R9 + JMP LBB6_112 -LBB6_82: +LBB6_117: + ADDQ R12, R9 SUBQ DI, R9 - ADDQ R11, R9 - JMP LBB6_83 + JMP LBB6_112 LCPI7_0: QUAD $0x2626262626262626; QUAD $0x2626262626262626 // .space 16, '&&&&&&&&&&&&&&&&' @@ -2733,10 +2678,10 @@ LCPI7_1: QUAD $0xe2e2e2e2e2e2e2e2; QUAD $0xe2e2e2e2e2e2e2e2 // .space 16, '\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2' LCPI7_2: - QUAD $0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' + QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' LCPI7_3: - QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' + QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' _html_escape: BYTE $0x55 // pushq %rbp @@ -2746,374 +2691,359 @@ _html_escape: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $16, SP - MOVQ CX, -56(BP) - MOVQ DX, R14 - MOVQ DX, -48(BP) + SUBQ $24, SP + MOVQ CX, -64(BP) + MOVQ DX, R10 + MOVQ DX, -56(BP) + MOVQ DI, -48(BP) MOVQ DI, AX TESTQ SI, SI - JLE LBB7_70 - MOVQ -56(BP), AX + JLE LBB7_59 + MOVQ -64(BP), AX MOVQ 0(AX), R9 - QUAD $0xffffff89056ffac5 // vmovdqu $-119(%rip), %xmm0 /* LCPI7_0(%rip) */ - QUAD $0xffffff910d6ffac5 // vmovdqu $-111(%rip), %xmm1 /* LCPI7_1(%rip) */ - QUAD $0xffffff99156ffac5 // vmovdqu $-103(%rip), %xmm2 /* LCPI7_2(%rip) */ - QUAD $0xffffffa11d6ffac5 // vmovdqu $-95(%rip), %xmm3 /* LCPI7_3(%rip) */ - LONG $0x5e1d8d4c; WORD $0x00ae; BYTE $0x00 // leaq $44638(%rip), %r11 /* __HtmlQuoteTab(%rip) */ - MOVQ DI, R12 - MOVQ -48(BP), R14 + QUAD $0xffffff85056ffac5 // vmovdqu $-123(%rip), %xmm0 /* LCPI7_0(%rip) */ + QUAD $0xffffff8d0d6ffac5 // vmovdqu $-115(%rip), %xmm1 /* LCPI7_1(%rip) */ + QUAD $0xffffff95156ffac5 // vmovdqu $-107(%rip), %xmm2 /* LCPI7_2(%rip) */ + QUAD $0xffffff9d1d6ffac5 // vmovdqu $-99(%rip), %xmm3 /* LCPI7_3(%rip) */ + MOVQ $5764607797912141824, R14 + LONG $0xba1d8d4c; WORD $0x00ae; BYTE $0x00 // leaq $44730(%rip), %r11 /* __HtmlQuoteTab(%rip) */ + MOVQ -48(BP), R15 + MOVQ -56(BP), R10 LBB7_2: TESTQ R9, R9 - JLE LBB7_3 + JLE LBB7_61 CMPQ SI, $15 - SETGT AX - MOVQ R9, R13 - MOVQ R14, R8 - MOVQ SI, BX - MOVQ R12, R15 + SETGT BX + MOVQ R9, R12 + MOVQ R10, R8 + MOVQ SI, AX + MOVQ R15, R13 CMPQ R9, $16 - 
JL LBB7_12 + JL LBB7_9 CMPQ SI, $16 - JL LBB7_12 - XORL R8, R8 - MOVQ SI, DX + JL LBB7_9 + MOVQ R15, R13 + MOVQ SI, AX + MOVQ R10, R8 MOVQ R9, CX -LBB7_7: - LONG $0x6f7a81c4; WORD $0x0424 // vmovdqu (%r12,%r8), %xmm4 +LBB7_6: + LONG $0x6f7ac1c4; WORD $0x0065 // vmovdqu (%r13), %xmm4 LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 - LONG $0xf2dbd9c5 // vpand %xmm2, %xmm4, %xmm6 + LONG $0xf2ebd9c5 // vpor %xmm2, %xmm4, %xmm6 LONG $0xf374c9c5 // vpcmpeqb %xmm3, %xmm6, %xmm6 LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0x7f7a81c4; WORD $0x0624 // vmovdqu %xmm4, (%r14,%r8) - LONG $0xc5d7f9c5 // vpmovmskb %xmm5, %eax - TESTW AX, AX - JNE LBB7_8 - LEAQ -16(DX), BX - LEAQ -16(CX), R13 - CMPQ DX, $31 - SETGT AX + LONG $0x7f7ac1c4; BYTE $0x20 // vmovdqu %xmm4, (%r8) + LONG $0xd5d7f9c5 // vpmovmskb %xmm5, %edx + TESTW DX, DX + JNE LBB7_21 + ADDQ $16, R13 ADDQ $16, R8 - CMPQ DX, $32 - JL LBB7_11 - MOVQ BX, DX + LEAQ -16(CX), R12 + CMPQ AX, $31 + SETGT BX + CMPQ AX, $32 + LEAQ -16(AX), AX + JL LBB7_9 CMPQ CX, $31 - MOVQ R13, CX - JG LBB7_7 - -LBB7_11: - LEAQ 0(R12)(R8*1), R15 - ADDQ R14, R8 + MOVQ R12, CX + JG LBB7_6 -LBB7_12: - TESTB AX, AX +LBB7_9: + TESTB BX, BX JE LBB7_13 - LONG $0x6f7ac1c4; BYTE $0x27 // vmovdqu (%r15), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 - LONG $0xf2dbd9c5 // vpand %xmm2, %xmm4, %xmm6 - LONG $0xf374c9c5 // vpcmpeqb %xmm3, %xmm6, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xc5d7f9c5 // vpmovmskb %xmm5, %eax + MOVQ R14, DX + LONG $0x6f7ac1c4; WORD $0x0065 // vmovdqu (%r13), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 + LONG $0xf2ebd9c5 // vpor %xmm2, %xmm4, %xmm6 + LONG $0xf374c9c5 // vpcmpeqb %xmm3, %xmm6, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xc5d7f9c5 // vpmovmskb %xmm5, %eax ORL $65536, AX - BSFL AX, R10 - LONG $0x7ef9e1c4; BYTE $0xe0 // vmovq %xmm4, %rax - CMPQ R13, R10 - JGE LBB7_24 - CMPQ R13, $8 - JB LBB7_35 + BSFL AX, R14 + LONG $0x7ef9e1c4; BYTE $0xe0 // vmovq %xmm4, %rax + CMPQ R12, R14 + JGE LBB7_22 + CMPQ R12, $8 + JB LBB7_25 MOVQ AX, 0(R8) - LEAQ 8(R15), R10 + LEAQ 8(R13), R14 ADDQ $8, R8 - LEAQ -8(R13), BX - CMPQ BX, $4 - JAE LBB7_38 - JMP LBB7_39 + LEAQ -8(R12), AX + CMPQ AX, $4 + JAE LBB7_26 + JMP LBB7_27 LBB7_13: - TESTQ R13, R13 - JLE LBB7_21 - TESTQ BX, BX - JLE LBB7_21 - XORL CX, CX - XORL AX, AX + TESTQ R12, R12 + JLE LBB7_20 + TESTQ AX, AX + JLE LBB7_20 -LBB7_16: - MOVBLZX 0(R15)(CX*1), R11 - CMPQ R11, $62 +LBB7_15: + MOVBLZX 0(R13), CX + CMPQ CX, $62 JA LBB7_17 - MOVQ $5764607797912141824, DX - BTQ R11, DX - JB LBB7_45 + BTQ CX, R14 + JB LBB7_24 LBB7_17: - CMPB R11, $-30 - JE LBB7_45 - LEAQ 0(BX)(AX*1), R10 - MOVB R11, 0(R8)(CX*1) - LEAQ -1(AX), DX - CMPQ R10, $2 + CMPB CX, $-30 + JE LBB7_24 + INCQ R13 + MOVB CX, 0(R8) + CMPQ AX, $2 + LEAQ -1(AX), AX JL LBB7_20 - ADDQ R13, AX - ADDQ $1, CX - CMPQ AX, $1 - MOVQ DX, AX - JG LBB7_16 + INCQ R8 + CMPQ R12, $1 + LEAQ -1(R12), R12 + JG LBB7_15 LBB7_20: - SUBQ DX, R15 - ADDQ DX, BX - LONG $0xff1d8d4c; WORD $0x00ac; BYTE $0x00 // leaq $44287(%rip), %r11 /* __HtmlQuoteTab(%rip) */ + SUBQ R15, R13 + NEGQ AX + SBBQ R12, R12 + XORQ R13, R12 + TESTQ R12, R12 + JNS LBB7_37 + JMP LBB7_57 LBB7_21: - TESTQ BX, BX - JE LBB7_22 - NOTQ R15 - ADDQ R12, R15 - 
TESTQ R15, R15 - JNS LBB7_49 - JMP LBB7_48 - -LBB7_8: - MOVWLZX AX, AX - BSFL AX, R15 - ADDQ R8, R15 - TESTQ R15, R15 - JNS LBB7_49 - JMP LBB7_48 + MOVWLZX DX, AX + SUBQ R15, R13 + BSFL AX, R12 + ADDQ R13, R12 + TESTQ R12, R12 + JNS LBB7_37 + JMP LBB7_57 -LBB7_24: - CMPL R10, $8 - JB LBB7_25 +LBB7_22: + CMPL R14, $8 + JB LBB7_31 MOVQ AX, 0(R8) - LEAQ 8(R15), R13 + LEAQ 8(R13), R12 ADDQ $8, R8 - LEAQ -8(R10), BX - CMPQ BX, $4 - JAE LBB7_28 - JMP LBB7_29 - -LBB7_45: - SUBQ R12, R15 - SUBQ AX, R15 - LONG $0x9d1d8d4c; WORD $0x00ac; BYTE $0x00 // leaq $44189(%rip), %r11 /* __HtmlQuoteTab(%rip) */ - TESTQ R15, R15 - JNS LBB7_49 - JMP LBB7_48 - -LBB7_35: - MOVQ R15, R10 - MOVQ R13, BX - CMPQ BX, $4 - JB LBB7_39 - -LBB7_38: - MOVL 0(R10), AX - MOVL AX, 0(R8) - ADDQ $4, R10 - ADDQ $4, R8 - ADDQ $-4, BX - -LBB7_39: - CMPQ BX, $2 - JAE LBB7_40 - TESTQ BX, BX - JE LBB7_43 - -LBB7_42: - MOVB 0(R10), AX - MOVB AX, 0(R8) + LEAQ -8(R14), AX + CMPQ AX, $4 + JAE LBB7_32 + JMP LBB7_33 -LBB7_43: - NOTQ R13 - ADDQ R12, R13 +LBB7_24: SUBQ R15, R13 - MOVQ R13, R15 - TESTQ R15, R15 - JNS LBB7_49 - JMP LBB7_48 + MOVQ R13, R12 + TESTQ R12, R12 + JNS LBB7_37 + JMP LBB7_57 LBB7_25: - MOVQ R15, R13 - MOVQ R10, BX - CMPQ BX, $4 - JB LBB7_29 + MOVQ R13, R14 + MOVQ R12, AX + CMPQ AX, $4 + JB LBB7_27 -LBB7_28: - MOVL 0(R13), AX - MOVL AX, 0(R8) - ADDQ $4, R13 +LBB7_26: + MOVL 0(R14), CX + MOVL CX, 0(R8) + ADDQ $4, R14 ADDQ $4, R8 - ADDQ $-4, BX + ADDQ $-4, AX + +LBB7_27: + CMPQ AX, $2 + JB LBB7_28 + MOVWLZX 0(R14), CX + MOVW CX, 0(R8) + ADDQ $2, R14 + ADDQ $2, R8 + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB7_29 + JMP LBB7_30 + +LBB7_28: + TESTQ AX, AX + JE LBB7_30 LBB7_29: - CMPQ BX, $2 - JAE LBB7_30 - TESTQ BX, BX - JE LBB7_33 + MOVB 0(R14), AX + MOVB AX, 0(R8) + +LBB7_30: + SUBQ R15, R12 + ADDQ R13, R12 + NOTQ R12 + MOVQ DX, R14 + TESTQ R12, R12 + JNS LBB7_37 + JMP LBB7_57 + +LBB7_31: + MOVQ R13, R12 + MOVQ R14, AX + CMPQ AX, $4 + JB LBB7_33 LBB7_32: - MOVB 0(R13), AX - MOVB AX, 0(R8) + MOVL 0(R12), CX + MOVL CX, 0(R8) + ADDQ $4, R12 + ADDQ $4, R8 + ADDQ $-4, AX LBB7_33: - SUBQ R12, R15 - ADDQ R10, R15 - TESTQ R15, R15 - JNS LBB7_49 - JMP LBB7_48 - -LBB7_40: - MOVWLZX 0(R10), AX - MOVW AX, 0(R8) - ADDQ $2, R10 + CMPQ AX, $2 + JB LBB7_34 + MOVWLZX 0(R12), CX + MOVW CX, 0(R8) + ADDQ $2, R12 ADDQ $2, R8 - ADDQ $-2, BX - TESTQ BX, BX - JNE LBB7_42 - JMP LBB7_43 + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB7_35 + JMP LBB7_36 -LBB7_30: - MOVWLZX 0(R13), AX - MOVW AX, 0(R8) - ADDQ $2, R13 - ADDQ $2, R8 - ADDQ $-2, BX - TESTQ BX, BX - JNE LBB7_32 - JMP LBB7_33 +LBB7_34: + TESTQ AX, AX + JE LBB7_36 -LBB7_22: - SUBQ R12, R15 - TESTQ R15, R15 - JS LBB7_48 - -LBB7_49: - ADDQ R15, R12 - ADDQ R15, R14 - SUBQ R15, SI - JLE LBB7_50 - SUBQ R15, R9 - MOVB 0(R12), CX +LBB7_35: + MOVB 0(R12), AX + MOVB AX, 0(R8) + +LBB7_36: + SUBQ R15, R13 + ADDQ R14, R13 + MOVQ R13, R12 + MOVQ DX, R14 + TESTQ R12, R12 + JS LBB7_57 + +LBB7_37: + ADDQ R12, R15 + ADDQ R12, R10 + SUBQ R12, SI + JLE LBB7_58 + SUBQ R12, R9 + MOVB 0(R15), CX CMPB CX, $-30 - JE LBB7_53 - MOVQ R12, AX + JE LBB7_51 + MOVQ R15, AX -LBB7_57: - MOVBLZX CX, CX - SHLQ $4, CX - MOVQ 0(CX)(R11*1), DX - MOVLQSX DX, R15 - SUBQ R15, R9 - JL LBB7_58 - SHLQ $32, DX - LEAQ 0(CX)(R11*1), R10 - ADDQ $8, R10 - MOVQ $12884901889, BX - CMPQ DX, BX - JL LBB7_62 - MOVL 0(R10), DX - MOVL DX, 0(R14) - LEAQ 0(CX)(R11*1), R10 - ADDQ $12, R10 - LEAQ 4(R14), R8 - LEAQ -4(R15), CX - CMPQ CX, $2 - JGE LBB7_65 - JMP LBB7_66 +LBB7_40: + MOVBLZX CX, DX + SHLQ $4, DX + MOVQ 0(DX)(R11*1), DI + MOVLQSX DI, BX + SUBQ BX, R9 + 
JL LBB7_60 + SHLQ $32, DI + LEAQ 8(DX)(R11*1), R15 + MOVQ $12884901889, CX + CMPQ DI, CX + JL LBB7_43 + MOVL 0(R15), CX + MOVL CX, 0(R10) + LEAQ 12(DX)(R11*1), R15 + LEAQ 4(R10), R8 + LEAQ -4(BX), DI + CMPQ DI, $2 + JGE LBB7_44 + JMP LBB7_45 -LBB7_62: - MOVQ R14, R8 - MOVQ R15, CX - CMPQ CX, $2 - JL LBB7_66 +LBB7_43: + MOVQ R10, R8 + MOVQ BX, DI + CMPQ DI, $2 + JL LBB7_45 -LBB7_65: - MOVWLZX 0(R10), DX +LBB7_44: + MOVWLZX 0(R15), DX MOVW DX, 0(R8) - ADDQ $2, R10 + ADDQ $2, R15 ADDQ $2, R8 - ADDQ $-2, CX + ADDQ $-2, DI -LBB7_66: - TESTQ CX, CX - JLE LBB7_68 - MOVB 0(R10), CX +LBB7_45: + TESTQ DI, DI + JLE LBB7_47 + MOVB 0(R15), CX MOVB CX, 0(R8) -LBB7_68: - ADDQ R15, R14 +LBB7_47: + ADDQ BX, R10 -LBB7_69: - ADDQ $1, AX - LEAQ -1(SI), CX - MOVQ AX, R12 +LBB7_48: + INCQ AX + MOVQ AX, R15 CMPQ SI, $1 - MOVQ CX, SI + LEAQ -1(SI), SI JG LBB7_2 - JMP LBB7_70 + JMP LBB7_59 -LBB7_53: +LBB7_51: CMPQ SI, $3 - JL LBB7_59 - CMPB 1(R12), $-128 - JNE LBB7_59 - MOVB 2(R12), CX + JL LBB7_55 + CMPB 1(R15), $-128 + JNE LBB7_55 + MOVB 2(R15), CX MOVL CX, AX ANDB $-2, AX CMPB AX, $-88 - JNE LBB7_59 - LEAQ 2(R12), AX + JNE LBB7_55 + LEAQ 2(R15), AX ADDQ $-2, SI - JMP LBB7_57 + JMP LBB7_40 -LBB7_59: +LBB7_55: TESTQ R9, R9 - JLE LBB7_3 - MOVB $-30, 0(R14) - ADDQ $1, R14 - ADDQ $-1, R9 - MOVQ R12, AX - JMP LBB7_69 + JLE LBB7_61 + MOVB $-30, 0(R10) + INCQ R10 + DECQ R9 + MOVQ R15, AX + JMP LBB7_48 -LBB7_48: - SUBQ -48(BP), R14 +LBB7_57: + SUBQ -56(BP), R10 + NOTQ R12 + ADDQ R12, R10 + MOVQ -64(BP), AX + MOVQ R10, 0(AX) + SUBQ -48(BP), R15 + ADDQ R12, R15 + NOTQ R15 + JMP LBB7_62 + +LBB7_58: MOVQ R15, AX - NOTQ AX - ADDQ R14, AX - MOVQ -56(BP), CX - MOVQ AX, 0(CX) - SUBQ R12, DI - ADDQ R15, DI - MOVQ DI, AX - JMP LBB7_71 -LBB7_50: - MOVQ R12, AX +LBB7_59: + SUBQ -56(BP), R10 + MOVQ -64(BP), CX + MOVQ R10, 0(CX) + SUBQ -48(BP), AX + JMP LBB7_63 + +LBB7_60: + SUBQ -56(BP), R10 + MOVQ -64(BP), AX + MOVQ R10, 0(AX) -LBB7_70: - SUBQ -48(BP), R14 - MOVQ -56(BP), CX - MOVQ R14, 0(CX) - SUBQ DI, AX - JMP LBB7_71 +LBB7_61: + NOTQ R15 + ADDQ -48(BP), R15 -LBB7_58: - SUBQ -48(BP), R14 - MOVQ -56(BP), AX - MOVQ R14, 0(AX) - -LBB7_3: - NOTQ R12 - ADDQ DI, R12 - MOVQ R12, AX +LBB7_62: + MOVQ R15, AX -LBB7_71: - ADDQ $16, SP +LBB7_63: + ADDQ $24, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -3147,7 +3077,7 @@ LBB8_5: SHLQ CX, DI MOVL AX, CX SHLQ $4, CX - LONG $0x543d8d4c; WORD $0x0031; BYTE $0x00 // leaq $12628(%rip), %r15 /* _POW10_M128_TAB(%rip) */ + LONG $0xec3d8d4c; WORD $0x0031; BYTE $0x00 // leaq $12780(%rip), %r15 /* _POW10_M128_TAB(%rip) */ MOVQ DI, AX MULQ 8(CX)(R15*1) MOVQ AX, R11 @@ -3173,35 +3103,35 @@ LBB8_5: JE LBB8_1 LBB8_11: - MOVQ R14, AX - SHRQ $63, AX - LEAL 9(AX), CX + MOVQ R14, DI + SHRQ $63, DI + LEAL 9(DI), CX SHRQ CX, R14 ORQ R11, DX JNE LBB8_14 - MOVL R14, CX - ANDL $3, CX - CMPL CX, $1 + MOVL R14, AX + ANDL $3, AX + CMPL AX, $1 JE LBB8_1 LBB8_14: - LONG $0x526ace69; WORD $0x0003 // imull $217706, %esi, %ecx - SARL $16, CX - ADDL $1087, CX - MOVLQSX CX, SI - MOVL R14, DX - ANDL $1, DX - ADDQ R14, DX - MOVQ $126100789566373888, CX - ANDQ DX, CX - NOTQ R10 - ADDQ SI, R10 - ADDQ AX, R10 - CMPQ CX, $1 - SBBQ $-1, R10 - LEAQ -1(R10), AX - CMPQ AX, $2045 - JBE LBB8_16 + LONG $0x526ac669; WORD $0x0003 // imull $217706, %esi, %eax + SARL $16, AX + ADDL $1087, AX + WORD $0x9848 // cltq + SUBQ R10, AX + XORQ $1, DI + SUBQ DI, AX + MOVL R14, DX + ANDL $1, DX + ADDQ R14, DX + MOVQ $126100789566373888, CX + ANDQ DX, CX + CMPQ CX, $1 + SBBQ $-1, AX + LEAQ -1(AX), SI + CMPQ SI, $2045 
+ JBE LBB8_16 LBB8_1: XORL AX, AX @@ -3218,15 +3148,15 @@ LBB8_16: MOVB $2, CX SBBB $0, CX SHRQ CX, DX - SHLQ $52, R10 - MOVQ $4503599627370495, AX - ANDQ DX, AX - ORQ R10, AX - MOVQ $-9223372036854775808, CX + SHLQ $52, AX + MOVQ $4503599627370495, CX + ANDQ DX, CX ORQ AX, CX + MOVQ $-9223372036854775808, AX + ORQ CX, AX CMPL R9, $-1 - LONG $0xc8450f48 // cmovneq %rax, %rcx - MOVQ CX, 0(R8) + LONG $0xc1450f48 // cmovneq %rcx, %rax + MOVQ AX, 0(R8) MOVB $1, AX JMP LBB8_17 @@ -3240,260 +3170,321 @@ _decimal_to_f64: BYTE $0x53 // pushq %rbx BYTE $0x50 // pushq %rax MOVQ SI, BX - MOVQ DI, R15 + MOVQ DI, R12 MOVQ $4503599627370496, R13 CMPL 16(DI), $0 JE LBB9_4 MOVQ $9218868437227405312, R14 - MOVL 20(R15), AX - XORL R12, R12 + MOVL 20(R12), AX + XORL R15, R15 CMPL AX, $310 - JG LBB9_64 + JG LBB9_78 CMPL AX, $-330 JGE LBB9_5 XORL R14, R14 - JMP LBB9_64 + JMP LBB9_78 LBB9_4: XORL R14, R14 - XORL R12, R12 - JMP LBB9_64 + XORL R15, R15 + JMP LBB9_78 LBB9_5: TESTL AX, AX MOVQ BX, -48(BP) - JLE LBB9_12 - XORL R12, R12 - LONG $0x80358d4c; WORD $0x005b; BYTE $0x00 // leaq $23424(%rip), %r14 /* _POW_TAB(%rip) */ - JMP LBB9_8 - -LBB9_10: - MOVL AX, AX - MOVL 0(R14)(AX*4), BX - CMPL 16(R15), $0 - JE LBB9_7 + JLE LBB9_22 + XORL R15, R15 + JMP LBB9_9 -LBB9_11: - MOVQ R15, DI +LBB9_7: + NEGL BX + MOVQ R12, DI MOVL BX, SI - LONG $0x002e6de8; BYTE $0x00 // callq _right_shift + LONG $0x002f37e8; BYTE $0x00 // callq _right_shift -LBB9_7: - ADDL BX, R12 - MOVL 20(R15), AX +LBB9_8: + ADDL R14, R15 + MOVL 20(R12), AX TESTL AX, AX - JLE LBB9_12 + JLE LBB9_22 -LBB9_8: +LBB9_9: + MOVL $27, R14 CMPL AX, $8 - JLE LBB9_10 - MOVL $27, BX - CMPL 16(R15), $0 - JNE LBB9_11 + JG LBB9_11 + MOVL AX, AX + LONG $0xe80d8d48; WORD $0x005b; BYTE $0x00 // leaq $23528(%rip), %rcx /* _POW_TAB(%rip) */ + MOVL 0(CX)(AX*4), R14 + +LBB9_11: + TESTL R14, R14 + JE LBB9_8 + CMPL 16(R12), $0 + JE LBB9_8 + MOVL R14, BX + NEGL BX + TESTL R14, R14 + JS LBB9_16 + CMPL R14, $61 + JL LBB9_7 + +LBB9_15: + MOVQ R12, DI + MOVL $60, SI + LONG $0x002ee5e8; BYTE $0x00 // callq _right_shift + LEAL 60(BX), AX + CMPL BX, $-120 + MOVL AX, BX + JL LBB9_15 JMP LBB9_7 -LBB9_12: - LONG $0x42358d4c; WORD $0x005b; BYTE $0x00 // leaq $23362(%rip), %r14 /* _POW_TAB(%rip) */ - JMP LBB9_14 +LBB9_16: + CMPL R14, $-61 + JG LBB9_18 + +LBB9_17: + MOVQ R12, DI + MOVL $60, SI + LONG $0x002d57e8; BYTE $0x00 // callq _left_shift + LEAL -60(BX), SI + CMPL BX, $120 + MOVL SI, BX + JG LBB9_17 + JMP LBB9_19 LBB9_18: - MOVL $27, BX - CMPL 16(R15), $0 - JE LBB9_13 + MOVL BX, SI + +LBB9_19: + MOVQ R12, DI + LONG $0x002d41e8; BYTE $0x00 // callq _left_shift + JMP LBB9_8 LBB9_20: - MOVQ R15, DI - MOVL BX, SI - LONG $0x002ca0e8; BYTE $0x00 // callq _left_shift - MOVL 20(R15), AX + MOVQ R12, DI + LONG $0x002d34e8; BYTE $0x00 // callq _left_shift + +LBB9_21: + SUBL R14, R15 + MOVL 20(R12), AX -LBB9_13: - SUBL BX, R12 +LBB9_22: + TESTL AX, AX + JS LBB9_25 + JNE LBB9_36 + MOVQ 0(R12), CX + CMPB 0(CX), $53 + JL LBB9_26 + JMP LBB9_36 + +LBB9_25: + MOVL $27, R14 + CMPL AX, $-8 + JL LBB9_27 + +LBB9_26: + NEGL AX + WORD $0x9848 // cltq + LONG $0x420d8d48; WORD $0x005b; BYTE $0x00 // leaq $23362(%rip), %rcx /* _POW_TAB(%rip) */ + MOVL 0(CX)(AX*4), R14 + +LBB9_27: + TESTL R14, R14 + JE LBB9_21 + CMPL 16(R12), $0 + JE LBB9_21 + TESTL R14, R14 + JLE LBB9_33 + MOVL R14, SI + CMPL R14, $61 + JL LBB9_20 + MOVL R14, BX -LBB9_14: - TESTL AX, AX - JS LBB9_17 - JNE LBB9_21 - MOVQ 0(R15), CX - CMPB 0(CX), $53 - JL LBB9_19 - JMP LBB9_21 +LBB9_32: + MOVQ R12, DI + MOVL $60, SI + LONG $0x002ccfe8; BYTE $0x00 
// callq _left_shift + LEAL -60(BX), SI + CMPL BX, $120 + MOVL SI, BX + JG LBB9_32 + JMP LBB9_20 + +LBB9_33: + MOVL R14, BX + CMPL R14, $-61 + JG LBB9_35 -LBB9_17: - CMPL AX, $-8 - JL LBB9_18 +LBB9_34: + MOVQ R12, DI + MOVL $60, SI + LONG $0x002e1ce8; BYTE $0x00 // callq _right_shift + LEAL 60(BX), AX + CMPL BX, $-120 + MOVL AX, BX + JL LBB9_34 -LBB9_19: - MOVL AX, CX - NEGL CX - MOVLQSX CX, CX - MOVL 0(R14)(CX*4), BX - CMPL 16(R15), $0 - JNE LBB9_20 - JMP LBB9_13 +LBB9_35: + NEGL BX + MOVQ R12, DI + MOVL BX, SI + LONG $0x002e06e8; BYTE $0x00 // callq _right_shift + JMP LBB9_21 -LBB9_21: - CMPL R12, $-1022 - JG LBB9_27 - CMPL 16(R15), $0 +LBB9_36: + CMPL R15, $-1022 + JG LBB9_42 + CMPL 16(R12), $0 MOVQ -48(BP), BX - JE LBB9_29 - CMPL R12, $-1082 - JG LBB9_30 - ADDL $961, R12 + JE LBB9_44 + CMPL R15, $-1082 + JG LBB9_45 + ADDL $961, R15 -LBB9_25: - MOVQ R15, DI +LBB9_40: + MOVQ R12, DI MOVL $60, SI LONG $0x002dcfe8; BYTE $0x00 // callq _right_shift - ADDL $60, R12 - CMPL R12, $-120 - JL LBB9_25 - ADDL $60, R12 - JMP LBB9_31 - -LBB9_27: - CMPL R12, $1024 + ADDL $60, R15 + CMPL R15, $-120 + JL LBB9_40 + ADDL $60, R15 + JMP LBB9_46 + +LBB9_42: + CMPL R15, $1024 MOVQ -48(BP), BX - JG LBB9_61 - ADDL $-1, R12 - MOVL R12, R14 - JMP LBB9_32 + JG LBB9_75 + DECL R15 + MOVL R15, R14 + JMP LBB9_47 -LBB9_29: +LBB9_44: MOVL $-1022, R14 - JMP LBB9_34 + JMP LBB9_49 -LBB9_30: - ADDL $1021, R12 +LBB9_45: + ADDL $1021, R15 -LBB9_31: - NEGL R12 - MOVQ R15, DI - MOVL R12, SI - LONG $0x002d88e8; BYTE $0x00 // callq _right_shift +LBB9_46: + NEGL R15 + MOVQ R12, DI + MOVL R15, SI + LONG $0x002d89e8; BYTE $0x00 // callq _right_shift MOVL $-1022, R14 -LBB9_32: - CMPL 16(R15), $0 - JE LBB9_34 - MOVQ R15, DI +LBB9_47: + CMPL 16(R12), $0 + JE LBB9_49 + MOVQ R12, DI MOVL $53, SI - LONG $0x002bdee8; BYTE $0x00 // callq _left_shift + LONG $0x002bffe8; BYTE $0x00 // callq _left_shift -LBB9_34: - MOVL 20(R15), AX - MOVQ $-1, R12 - CMPL AX, $20 - JG LBB9_63 - TESTL AX, AX - JLE LBB9_40 - MOVL 16(R15), DX - XORL SI, SI - TESTL DX, DX - WORD $0x480f; BYTE $0xd6 // cmovsl %esi, %edx - LEAQ -1(AX), R9 - CMPQ R9, DX - LONG $0xca430f44 // cmovael %edx, %r9d - LEAL 1(R9), R8 - XORL R12, R12 +LBB9_49: + MOVLQSX 20(R12), R8 + MOVQ $-1, R15 + CMPQ R8, $20 + JG LBB9_77 + MOVL R8, CX + TESTL CX, CX + JLE LBB9_54 + MOVLQSX 16(R12), SI + XORL DX, DX + XORL R15, R15 -LBB9_37: +LBB9_52: CMPQ DX, SI - JE LBB9_41 - LEAQ 0(R12)(R12*4), DI - MOVQ 0(R15), CX - MOVBQSX 0(CX)(SI*1), CX - LEAQ 0(CX)(DI*2), R12 - ADDQ $-48, R12 - ADDQ $1, SI - CMPQ AX, SI - JNE LBB9_37 - MOVL R8, R9 - JMP LBB9_41 - -LBB9_40: - XORL R9, R9 - XORL R12, R12 - -LBB9_41: - CMPL AX, R9 - JLE LBB9_49 - MOVL AX, SI - SUBL R9, SI - MOVL R9, DX - NOTL DX - ADDL AX, DX - ANDL $7, SI - JE LBB9_46 - NEGL SI - XORL DI, DI + JGE LBB9_55 + LEAQ 0(R15)(R15*4), AX + MOVQ 0(R12), DI + MOVBQSX 0(DI)(DX*1), DI + LEAQ -48(DI)(AX*2), R15 + INCQ DX + CMPQ CX, DX + JNE LBB9_52 + JMP LBB9_55 + +LBB9_54: + XORL DX, DX + XORL R15, R15 -LBB9_44: - ADDQ R12, R12 - LEAQ 0(R12)(R12*4), R12 - ADDL $-1, DI - CMPL SI, DI - JNE LBB9_44 - SUBL DI, R9 +LBB9_55: + CMPL R8, DX + JLE LBB9_63 + MOVL CX, DI + SUBL DX, DI + MOVL DX, SI + NOTL SI + ADDL R8, SI + ANDL $7, DI + JE LBB9_60 + NEGL DI + XORL AX, AX -LBB9_46: - CMPL DX, $7 - JB LBB9_49 - MOVL AX, DX - SUBL R9, DX +LBB9_58: + ADDQ R15, R15 + LEAQ 0(R15)(R15*4), R15 + DECL AX + CMPL DI, AX + JNE LBB9_58 + SUBL AX, DX + +LBB9_60: + CMPL SI, $7 + JB LBB9_63 + MOVL CX, AX + SUBL DX, AX -LBB9_48: - IMUL3Q $100000000, R12, R12 - ADDL $-8, DX - JNE LBB9_48 
+LBB9_62: + IMUL3Q $100000000, R15, R15 + ADDL $-8, AX + JNE LBB9_62 -LBB9_49: - TESTL AX, AX - JS LBB9_57 - MOVL 16(R15), SI - CMPL SI, AX - JLE LBB9_57 - MOVQ 0(R15), DX - MOVB 0(DX)(AX*1), CX - LEAL 1(AX), DI - CMPL DI, SI - JNE LBB9_58 - CMPB CX, $53 - JNE LBB9_58 - CMPL 28(R15), $0 - SETNE CX - JNE LBB9_59 - TESTL AX, AX - JLE LBB9_59 - ADDL $-1, AX - MOVB 0(DX)(AX*1), CX - ANDB $1, CX - JMP LBB9_59 +LBB9_63: + TESTL CX, CX + JS LBB9_71 + MOVL 16(R12), DX + CMPL DX, R8 + JLE LBB9_71 + MOVQ 0(R12), SI + MOVB 0(SI)(CX*1), AX + LEAL 1(CX), DI + CMPL DI, DX + JNE LBB9_72 + CMPB AX, $53 + JNE LBB9_72 + CMPL 28(R12), $0 + SETNE DX + JNE LBB9_73 + TESTL CX, CX + JLE LBB9_73 + MOVB -1(R8)(SI*1), DX + ANDB $1, DX + JMP LBB9_73 -LBB9_57: - XORL CX, CX +LBB9_71: + XORL DX, DX -LBB9_59: - MOVBLZX CX, AX - ADDQ AX, R12 +LBB9_73: + MOVBLZX DX, AX + ADDQ AX, R15 MOVQ $9007199254740992, AX - CMPQ R12, AX - JNE LBB9_63 + CMPQ R15, AX + JNE LBB9_77 CMPL R14, $1022 - JLE LBB9_62 + JLE LBB9_76 -LBB9_61: - XORL R12, R12 +LBB9_75: + XORL R15, R15 MOVQ $9218868437227405312, R14 - JMP LBB9_64 + JMP LBB9_78 -LBB9_62: - ADDL $1, R14 - MOVQ R13, R12 +LBB9_76: + INCL R14 + MOVQ R13, R15 -LBB9_63: - MOVQ R12, AX +LBB9_77: + MOVQ R15, AX ANDQ R13, AX ADDL $1023, R14 ANDL $2047, R14 @@ -3501,13 +3492,13 @@ LBB9_63: TESTQ AX, AX LONG $0xf0440f4c // cmoveq %rax, %r14 -LBB9_64: - ADDQ $-1, R13 - ANDQ R12, R13 +LBB9_78: + DECQ R13 + ANDQ R15, R13 ORQ R14, R13 MOVQ $-9223372036854775808, AX ORQ R13, AX - CMPL 24(R15), $0 + CMPL 24(R12), $0 LONG $0xc5440f49 // cmoveq %r13, %rax MOVQ AX, 0(BX) XORL AX, AX @@ -3520,10 +3511,10 @@ LBB9_64: BYTE $0x5d // popq %rbp RET -LBB9_58: - CMPB CX, $52 - SETGT CX - JMP LBB9_59 +LBB9_72: + CMPB AX, $52 + SETGT DX + JMP LBB9_73 _atof_native: BYTE $0x55 // pushq %rbp @@ -3545,7 +3536,7 @@ _atof_native: LBB10_4: MOVQ -32(BP), CX MOVB $0, 0(CX)(AX*1) - ADDQ $1, AX + INCQ AX CMPQ -24(BP), AX JA LBB10_4 @@ -3553,90 +3544,89 @@ LBB10_5: LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4511f8c5; BYTE $0xf0 // vmovups %xmm0, $-16(%rbp) CMPB 0(DI), $45 - JNE LBB10_8 + JNE LBB10_6 MOVL $1, -8(BP) MOVL $1, AX CMPQ AX, SI JL LBB10_9 -LBB10_7: +LBB10_41: MOVL $0, -12(BP) JMP LBB10_40 -LBB10_8: +LBB10_6: XORL AX, AX CMPQ AX, SI - JGE LBB10_7 + JGE LBB10_41 LBB10_9: MOVB $1, R11 XORL R9, R9 XORL R10, R10 XORL R8, R8 - JMP LBB10_13 + JMP LBB10_10 -LBB10_21: - MOVL $1, -4(BP) +LBB10_13: + DECL -12(BP) + XORL R10, R10 -LBB10_12: - ADDQ $1, AX +LBB10_22: + INCQ AX CMPQ AX, SI SETLT R11 CMPQ SI, AX JE LBB10_23 -LBB10_13: +LBB10_10: MOVBLZX 0(DI)(AX*1), CX LEAL -48(CX), DX CMPB DX, $9 - JA LBB10_18 - CMPB CX, $48 - JNE LBB10_20 + JA LBB10_19 TESTL R10, R10 - JE LBB10_22 - MOVLQSX R9, R11 - CMPQ -24(BP), R11 - JA LBB10_10 - JMP LBB10_11 - -LBB10_18: + JNE LBB10_14 + CMPB CX, $48 + JE LBB10_13 + +LBB10_14: + MOVLQSX R9, R10 + CMPQ -24(BP), R10 + JBE LBB10_16 + MOVQ -32(BP), DX + MOVB CX, 0(DX)(R10*1) + MOVL -16(BP), R9 + INCL R9 + MOVL R9, -16(BP) + MOVL R9, R10 + JMP LBB10_22 + +LBB10_19: CMPB CX, $46 - JNE LBB10_30 + JNE LBB10_20 MOVL R10, -12(BP) MOVL $1, R8 - JMP LBB10_12 - -LBB10_20: - MOVLQSX R10, R11 - CMPQ -24(BP), R11 - JBE LBB10_21 - -LBB10_10: - MOVQ -32(BP), DX - MOVB CX, 0(DX)(R11*1) - MOVL -16(BP), R9 - ADDL $1, R9 - MOVL R9, -16(BP) + JMP LBB10_22 -LBB10_11: +LBB10_16: + CMPB CX, $48 + JNE LBB10_18 MOVL R9, R10 - JMP LBB10_12 + JMP LBB10_22 -LBB10_22: - ADDL $-1, -12(BP) - XORL R10, R10 - JMP LBB10_12 +LBB10_18: + MOVL $1, -4(BP) + MOVL R9, R10 + JMP LBB10_22 LBB10_23: MOVL SI, CX MOVQ 
SI, AX TESTL R8, R8 - JNE LBB10_25 + JNE LBB10_26 -LBB10_24: +LBB10_25: MOVL R9, -12(BP) -LBB10_25: +LBB10_26: TESTB $1, R11 JE LBB10_40 MOVB 0(DI)(CX*1), CX @@ -3646,64 +3636,62 @@ LBB10_25: MOVL AX, DX MOVB 1(DI)(DX*1), CX CMPB CX, $45 - JE LBB10_31 + JE LBB10_32 MOVL $1, R8 CMPB CX, $43 - JNE LBB10_33 + JNE LBB10_30 ADDL $2, AX - JMP LBB10_32 + JMP LBB10_33 -LBB10_30: +LBB10_20: MOVQ AX, CX TESTL R8, R8 - JNE LBB10_25 - JMP LBB10_24 + JNE LBB10_26 + JMP LBB10_25 -LBB10_31: +LBB10_32: ADDL $2, AX MOVL $-1, R8 -LBB10_32: +LBB10_33: MOVL AX, DX MOVLQSX DX, DX - XORL AX, AX + XORL R9, R9 CMPQ DX, SI - JL LBB10_34 + JL LBB10_35 JMP LBB10_39 -LBB10_33: - ADDQ $1, DX +LBB10_30: + INCQ DX MOVLQSX DX, DX - XORL AX, AX + XORL R9, R9 CMPQ DX, SI JGE LBB10_39 -LBB10_34: - XORL AX, AX - LBB10_35: - MOVBLSX 0(DI)(DX*1), CX - CMPL CX, $48 - JL LBB10_39 - CMPL AX, $9999 - JG LBB10_39 - CMPB CX, $57 + XORL R9, R9 + +LBB10_36: + CMPL R9, $9999 JG LBB10_39 - LEAL 0(AX)(AX*4), AX - LEAL 0(CX)(AX*2), AX - ADDL $-48, AX - ADDQ $1, DX + MOVBLZX 0(DI)(DX*1), CX + LEAL -48(CX), AX + CMPB AX, $9 + JA LBB10_39 + LEAL 0(R9)(R9*4), AX + LEAL -48(CX)(AX*2), R9 + INCQ DX CMPQ SI, DX - JNE LBB10_35 + JNE LBB10_36 LBB10_39: - IMULL R8, AX - ADDL AX, -12(BP) + IMULL R8, R9 + ADDL R9, -12(BP) LBB10_40: LEAQ -32(BP), DI LEAQ -40(BP), SI - LONG $0xfffb60e8; BYTE $0xff // callq _decimal_to_f64 + LONG $0xfffabde8; BYTE $0xff // callq _decimal_to_f64 LONG $0x4510fbc5; BYTE $0xd8 // vmovsd $-40(%rbp), %xmm0 ADDQ $48, SP BYTE $0x5d // popq %rbp @@ -3726,11 +3714,11 @@ _value: MOVQ DI, -80(BP) MOVQ SI, -72(BP) LEAQ -48(BP), DX - LONG $0x000564e8; BYTE $0x00 // callq _advance_ns + LONG $0x00054de8; BYTE $0x00 // callq _advance_ns MOVBLSX AX, AX CMPL AX, $125 JA LBB11_11 - LONG $0x190d8d48; WORD $0x0003; BYTE $0x00 // leaq $793(%rip), %rcx /* LJTI11_0(%rip) */ + LONG $0x020d8d48; WORD $0x0003; BYTE $0x00 // leaq $770(%rip), %rcx /* LJTI11_0(%rip) */ MOVLQSX 0(CX)(AX*4), AX ADDQ CX, AX JMP AX @@ -3745,7 +3733,7 @@ LBB11_2: LEAQ -80(BP), DI LEAQ -48(BP), SI MOVQ -56(BP), DX - LONG $0x000b16e8; BYTE $0x00 // callq _vnumber + LONG $0x000b2ce8; BYTE $0x00 // callq _vnumber MOVQ -48(BP), BX JMP LBB11_50 @@ -3767,7 +3755,7 @@ LBB11_4: LBB11_7: MOVQ R12, DI MOVQ BX, SI - LONG $0x002183e8; BYTE $0x00 // callq _do_skip_number + LONG $0x002138e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX JS LBB11_46 ADDQ AX, R12 @@ -3789,7 +3777,7 @@ LBB11_10: LBB11_11: MOVQ $-2, 0(R14) MOVQ -48(BP), BX - ADDQ $-1, BX + DECQ BX JMP LBB11_50 LBB11_12: @@ -3798,7 +3786,7 @@ LBB11_12: LEAQ -80(BP), DI LEAQ -64(BP), DX MOVQ R15, SI - LONG $0x0006dae8; BYTE $0x00 // callq _advance_string + LONG $0x0006fde8; BYTE $0x00 // callq _advance_string TESTQ AX, AX JS LBB11_31 MOVQ AX, -48(BP) @@ -3814,14 +3802,14 @@ LBB11_12: JMP LBB11_50 LBB11_14: + XORL AX, AX TESTL R12, R12 - MOVQ $-2, AX MOVL $11, CX JMP LBB11_28 LBB11_15: + XORL AX, AX TESTL R12, R12 - MOVQ $-2, AX MOVL $10, CX JMP LBB11_28 @@ -3831,8 +3819,8 @@ LBB11_16: JMP LBB11_50 LBB11_17: + XORL AX, AX TESTL R12, R12 - MOVQ $-2, AX MOVL $12, CX JMP LBB11_28 @@ -3880,18 +3868,18 @@ LBB11_26: JMP LBB11_50 LBB11_27: + XORL AX, AX TESTL R12, R12 - MOVQ $-2, AX MOVL $13, CX LBB11_28: - LONG $0xc8490f48 // cmovnsq %rax, %rcx - MOVQ CX, 0(R14) - SARL $31, R12 - NOTL R12 - MOVLQSX R12, BX - ADDQ -48(BP), BX - JMP LBB11_50 + MOVQ $-2, DX + LONG $0xd1480f48 // cmovsq %rcx, %rdx + SETPL AX + MOVQ DX, 0(R14) + MOVQ -48(BP), BX + SUBQ AX, BX + JMP LBB11_50 LBB11_24: MOVL -1(R15)(AX*1), DX @@ -3916,14 +3904,14 @@ 
LBB11_32: LBB11_34: SHRL $8, DX MOVBLSX 1(R15)(AX*1), SI - ADDQ $1, AX + INCQ AX MOVBLZX DX, DI CMPL DI, SI JE LBB11_34 JMP LBB11_39 LBB11_36: - ADDQ $-1, AX + DECQ AX MOVQ AX, -48(BP) MOVQ $-2, CX CMPB DX, $110 @@ -3933,14 +3921,14 @@ LBB11_36: LBB11_38: SHRL $8, DX MOVBLSX 1(R15)(AX*1), SI - ADDQ $1, AX + INCQ AX MOVBLZX DX, DI CMPL DI, SI JE LBB11_38 JMP LBB11_39 LBB11_41: - ADDQ $-1, AX + DECQ AX MOVQ AX, -48(BP) MOVQ $-2, CX CMPB DX, $116 @@ -3950,7 +3938,7 @@ LBB11_41: LBB11_43: SHRL $8, DX MOVBLSX 1(R15)(AX*1), SI - ADDQ $1, AX + INCQ AX MOVBLZX DX, DI CMPL DI, SI JE LBB11_43 @@ -4010,132 +3998,132 @@ LBB11_50: // .set L11_0_set_26, LBB11_26-LJTI11_0 // .set L11_0_set_27, LBB11_27-LJTI11_0 LJTI11_0: - LONG $0xfffffd8b // .long L11_0_set_10 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffdaf // .long L11_0_set_12 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe02 // .long L11_0_set_14 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffe16 // .long L11_0_set_15 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b 
// .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe2a // .long L11_0_set_16 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe3a // .long L11_0_set_17 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe4e // .long L11_0_set_18 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe7d // .long L11_0_set_21 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffead // .long L11_0_set_23 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffecd // .long L11_0_set_26 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffedd // .long L11_0_set_27 + LONG $0xfffffda2 // .long L11_0_set_10 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG 
$0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdc5 // .long L11_0_set_12 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffe18 // .long L11_0_set_14 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffd07 // .long L11_0_set_2 + LONG $0xfffffe27 // .long L11_0_set_15 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffe36 // .long L11_0_set_16 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffe46 // .long L11_0_set_17 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffe55 // .long L11_0_set_18 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffe84 // .long L11_0_set_21 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffeb4 // .long L11_0_set_23 + 
LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffed4 // .long L11_0_set_26 + LONG $0xfffffdb2 // .long L11_0_set_11 + LONG $0xfffffee4 // .long L11_0_set_27 LCPI12_0: QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' @@ -4152,6 +4140,7 @@ LCPI12_3: _advance_ns: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + BYTE $0x53 // pushq %rbx MOVQ 0(DX), R8 CMPQ R8, SI JAE LBB12_6 @@ -4163,14 +4152,14 @@ _advance_ns: ADDB $-9, AX CMPB AX, $1 JBE LBB12_6 - MOVQ R8, AX + MOVQ R8, R10 JMP LBB12_5 LBB12_6: - LEAQ 1(R8), AX - CMPQ AX, SI + LEAQ 1(R8), R10 + CMPQ R10, SI JAE LBB12_10 - MOVB 0(DI)(AX*1), CX + MOVB 0(DI)(R10*1), CX CMPB CX, $13 JE LBB12_10 CMPB CX, $32 @@ -4180,10 +4169,10 @@ LBB12_6: JA LBB12_5 LBB12_10: - LEAQ 2(R8), AX - CMPQ AX, SI + LEAQ 2(R8), R10 + CMPQ R10, SI JAE LBB12_14 - MOVB 0(DI)(AX*1), CX + MOVB 0(DI)(R10*1), CX CMPB CX, $13 JE LBB12_14 CMPB CX, $32 @@ -4193,10 +4182,10 @@ LBB12_10: JA LBB12_5 LBB12_14: - LEAQ 3(R8), AX - CMPQ AX, SI + LEAQ 3(R8), R10 + CMPQ R10, SI JAE LBB12_18 - MOVB 0(DI)(AX*1), CX + MOVB 0(DI)(R10*1), CX CMPB CX, $13 JE LBB12_18 CMPB CX, $32 @@ -4206,55 +4195,59 @@ LBB12_14: JA LBB12_5 LBB12_18: - ADDQ $4, R8 - CMPQ R8, SI + LEAQ 4(R8), R9 + CMPQ R9, SI JAE LBB12_19 - LEAQ 0(DI)(R8*1), R9 - MOVQ SI, R11 - SUBQ R8, R11 + LEAQ 0(DI)(R9*1), R10 + MOVQ SI, BX + SUBQ R9, BX JE LBB12_27 - MOVL R9, AX + MOVL R10, AX ANDL $15, AX TESTQ AX, AX JE LBB12_27 - MOVL $5, R10 - SUBQ SI, R10 + LEAQ 0(DI)(R8*1), R10 + MOVQ SI, BX + SUBQ R8, BX + LEAQ -5(BX), R8 + XORL AX, AX MOVQ $4294977024, R9 LBB12_23: - MOVBLSX 0(DI)(R8*1), CX + MOVBLSX 4(R10)(AX*1), CX CMPL CX, $32 - JA LBB12_42 + JA LBB12_25 BTQ CX, R9 - JAE LBB12_42 - LEAQ 0(R10)(R8*1), AX - LEAQ 1(R8), CX - CMPQ AX, $4 + JAE LBB12_25 + LEAQ 1(AX), R11 + CMPQ R8, AX JE LBB12_26 - LEAL 0(DI)(R8*1), AX - ADDL $1, AX - ANDL $15, AX - MOVQ CX, R8 - TESTQ AX, AX + LEAQ 5(R10)(AX*1), CX + ANDL $15, CX + MOVQ R11, AX + TESTQ CX, CX JNE LBB12_23 LBB12_26: - LEAQ 0(DI)(CX*1), R9 - MOVQ SI, R11 - SUBQ CX, R11 + LEAQ 4(R11)(R10*1), R10 + SUBQ R11, BX + ADDQ $-4, BX LBB12_27: - CMPQ R11, $16 + CMPQ BX, $16 JB LBB12_33 - MOVQ DI, CX - SUBQ R9, CX - QUAD $0xfffffea7056ffac5 // vmovdqu $-345(%rip), %xmm0 /* LCPI12_0(%rip) */ - QUAD $0xfffffeaf0d6ffac5 // vmovdqu $-337(%rip), %xmm1 /* LCPI12_1(%rip) */ - QUAD $0xfffffeb7156ffac5 // vmovdqu $-329(%rip), %xmm2 /* LCPI12_2(%rip) */ - QUAD $0xfffffebf1d6ffac5 // vmovdqu $-321(%rip), %xmm3 /* LCPI12_3(%rip) */ + LEAQ -16(BX), R8 + MOVQ R8, AX + ANDQ $-16, AX + LEAQ 16(AX)(R10*1), R9 + ANDL $15, R8 + QUAD $0xfffffe92056ffac5 // vmovdqu $-366(%rip), %xmm0 /* LCPI12_0(%rip) */ + QUAD $0xfffffe9a0d6ffac5 // vmovdqu $-358(%rip), %xmm1 /* LCPI12_1(%rip) */ + QUAD $0xfffffea2156ffac5 // vmovdqu $-350(%rip), %xmm2 /* LCPI12_2(%rip) */ + QUAD $0xfffffeaa1d6ffac5 // vmovdqu $-342(%rip), %xmm3 /* LCPI12_3(%rip) */ LBB12_29: - LONG $0x6f79c1c4; BYTE $0x21 // vmovdqa (%r9), %xmm4 + LONG $0x6f79c1c4; BYTE $0x22 // vmovdqa (%r10), %xmm4 LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 @@ -4265,71 +4258,83 @@ LBB12_29: LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax CMPW AX, $-1 JNE LBB12_30 - ADDQ $16, R9 - ADDQ $-16, R11 - ADDQ $-16, CX - CMPQ R11, $15 + ADDQ $16, 
R10 + ADDQ $-16, BX + CMPQ BX, $15 JA LBB12_29 + MOVQ R8, BX + MOVQ R9, R10 LBB12_33: - TESTQ R11, R11 - JE LBB12_40 - LEAQ 0(R9)(R11*1), R8 - XORL CX, CX - MOVQ $4294977024, R10 + TESTQ BX, BX + JE LBB12_42 + LEAQ 0(R10)(BX*1), R8 + INCQ R10 + MOVQ $4294977024, R9 LBB12_35: - MOVBLSX 0(R9)(CX*1), AX + MOVBLSX -1(R10), AX CMPL AX, $32 JA LBB12_37 - BTQ AX, R10 + BTQ AX, R9 JAE LBB12_37 - ADDQ $1, CX - CMPQ R11, CX + DECQ BX + INCQ R10 + TESTQ BX, BX JNE LBB12_35 - MOVQ R8, R9 - -LBB12_40: - SUBQ DI, R9 + MOVQ R8, R10 -LBB12_41: - MOVQ R9, R8 - JMP LBB12_42 +LBB12_42: + SUBQ DI, R10 + CMPQ R10, SI + JB LBB12_5 + JMP LBB12_44 LBB12_19: - MOVQ R8, 0(DX) - JMP LBB12_43 + MOVQ R9, 0(DX) + JMP LBB12_44 LBB12_30: MOVWLZX AX, AX + SUBQ DI, R10 NOTL AX - BSFL AX, R8 - SUBQ CX, R8 + BSFL AX, AX + ADDQ AX, R10 + CMPQ R10, SI + JB LBB12_5 + JMP LBB12_44 -LBB12_42: - MOVQ R8, AX - CMPQ R8, SI - JAE LBB12_43 +LBB12_25: + ADDQ AX, R10 + MOVQ DI, AX + NOTQ AX + LEAQ 5(AX)(R10*1), R10 + CMPQ R10, SI + JAE LBB12_44 LBB12_5: - LEAQ 1(AX), CX - MOVQ CX, 0(DX) - MOVB 0(DI)(AX*1), AX + LEAQ 1(R10), AX + MOVQ AX, 0(DX) + MOVB 0(DI)(R10*1), AX MOVBLSX AX, AX - BYTE $0x5d // popq %rbp + BYTE $0x5b // popq %rbx + BYTE $0x5d // popq %rbp RET -LBB12_43: +LBB12_37: + MOVQ DI, AX + NOTQ AX + ADDQ AX, R10 + CMPQ R10, SI + JB LBB12_5 + +LBB12_44: XORL AX, AX MOVBLSX AX, AX + BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB12_37: - SUBQ DI, R9 - ADDQ CX, R9 - JMP LBB12_41 - _vstring: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp @@ -4386,295 +4391,281 @@ _advance_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - BYTE $0x50 // pushq %rax - MOVQ 8(DI), R15 - SUBQ SI, R15 - JE LBB14_18 - MOVQ 0(DI), R9 - MOVQ DX, -48(BP) + SUBQ $16, SP + MOVQ 8(DI), R12 + SUBQ SI, R12 + JE LBB14_17 + MOVQ 0(DI), AX + MOVQ AX, -48(BP) + ADDQ AX, SI + MOVQ DX, -56(BP) MOVQ $-1, 0(DX) - CMPQ R15, $64 - JB LBB14_19 - MOVQ SI, R12 - NOTQ R12 - MOVQ $-1, DX - XORL R14, R14 - QUAD $0xffffff95056ffac5 // vmovdqu $-107(%rip), %xmm0 /* LCPI14_0(%rip) */ - QUAD $0xffffff9d0d6ffac5 // vmovdqu $-99(%rip), %xmm1 /* LCPI14_1(%rip) */ + CMPQ R12, $64 + JB LBB14_18 + MOVL R12, R9 + ANDL $63, R9 + MOVQ $-1, R14 + XORL R15, R15 + QUAD $0xffffff8a056ffac5 // vmovdqu $-118(%rip), %xmm0 /* LCPI14_0(%rip) */ + QUAD $0xffffff920d6ffac5 // vmovdqu $-110(%rip), %xmm1 /* LCPI14_1(%rip) */ + MOVQ -48(BP), DX LBB14_3: - LONG $0x6f7ac1c4; WORD $0x3114 // vmovdqu (%r9,%rsi), %xmm2 - LONG $0x6f7ac1c4; WORD $0x315c; BYTE $0x10 // vmovdqu $16(%r9,%rsi), %xmm3 - LONG $0x6f7ac1c4; WORD $0x3164; BYTE $0x20 // vmovdqu $32(%r9,%rsi), %xmm4 - LONG $0x6f7ac1c4; WORD $0x316c; BYTE $0x30 // vmovdqu $48(%r9,%rsi), %xmm5 - LONG $0xf074e9c5 // vpcmpeqb %xmm0, %xmm2, %xmm6 - LONG $0xded7f9c5 // vpmovmskb %xmm6, %ebx - LONG $0xf074e1c5 // vpcmpeqb %xmm0, %xmm3, %xmm6 - LONG $0xfed7f9c5 // vpmovmskb %xmm6, %edi - LONG $0xf074d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm6 - LONG $0xced7f9c5 // vpmovmskb %xmm6, %ecx - LONG $0xf074d1c5 // vpcmpeqb %xmm0, %xmm5, %xmm6 - LONG $0xeed779c5 // vpmovmskb %xmm6, %r13d - LONG $0xd174e9c5 // vpcmpeqb %xmm1, %xmm2, %xmm2 - LONG $0xc2d7f9c5 // vpmovmskb %xmm2, %eax - LONG $0xd174e1c5 // vpcmpeqb %xmm1, %xmm3, %xmm2 - LONG $0xd2d779c5 // vpmovmskb %xmm2, %r10d - LONG $0xd174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm2 - LONG $0xdad779c5 // vpmovmskb %xmm2, %r11d - LONG $0xd174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm2 - LONG $0xc2d779c5 // vpmovmskb %xmm2, %r8d + LONG $0x166ffac5 // vmovdqu (%rsi), %xmm2 + 
LONG $0x5e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm3 + LONG $0x666ffac5; BYTE $0x20 // vmovdqu $32(%rsi), %xmm4 + LONG $0x6e6ffac5; BYTE $0x30 // vmovdqu $48(%rsi), %xmm5 + LONG $0xf074e9c5 // vpcmpeqb %xmm0, %xmm2, %xmm6 + LONG $0xded7f9c5 // vpmovmskb %xmm6, %ebx + LONG $0xf074e1c5 // vpcmpeqb %xmm0, %xmm3, %xmm6 + LONG $0xced7f9c5 // vpmovmskb %xmm6, %ecx + LONG $0xf074d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm6 + LONG $0xc6d7f9c5 // vpmovmskb %xmm6, %eax + LONG $0xf074d1c5 // vpcmpeqb %xmm0, %xmm5, %xmm6 + LONG $0xeed779c5 // vpmovmskb %xmm6, %r13d + LONG $0xd174e9c5 // vpcmpeqb %xmm1, %xmm2, %xmm2 + LONG $0xfad7f9c5 // vpmovmskb %xmm2, %edi + LONG $0xd174e1c5 // vpcmpeqb %xmm1, %xmm3, %xmm2 + LONG $0xd2d779c5 // vpmovmskb %xmm2, %r10d + LONG $0xd174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm2 + LONG $0xdad779c5 // vpmovmskb %xmm2, %r11d + LONG $0xd174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm2 + LONG $0xc2d779c5 // vpmovmskb %xmm2, %r8d SHLQ $48, R13 - SHLQ $32, CX - SHLQ $16, DI - ORQ DI, BX + SHLQ $32, AX + SHLQ $16, CX ORQ CX, BX + ORQ AX, BX SHLQ $48, R8 SHLQ $32, R11 SHLQ $16, R10 - ORQ R10, AX - ORQ R11, AX - ORQ R8, AX + ORQ R10, DI + ORQ R11, DI + ORQ R8, DI JE LBB14_5 - CMPQ DX, $-1 + CMPQ R14, $-1 JE LBB14_8 LBB14_5: ORQ R13, BX - MOVQ AX, CX - ORQ R14, CX + MOVQ DI, AX + ORQ R15, AX JNE LBB14_9 LBB14_6: TESTQ BX, BX - JNE LBB14_10 + JNE LBB14_15 LBB14_7: - ADDQ $-64, R15 - ADDQ $-64, R12 ADDQ $64, SI - CMPQ R15, $63 + ADDQ $-64, R12 + CMPQ R12, $63 JA LBB14_3 - JMP LBB14_12 + JMP LBB14_10 LBB14_8: - BSFQ AX, DX - ADDQ SI, DX - MOVQ -48(BP), CX - MOVQ DX, 0(CX) + MOVQ SI, AX + SUBQ DX, AX + BSFQ DI, R14 + ADDQ AX, R14 + MOVQ -56(BP), AX + MOVQ R14, 0(AX) ORQ R13, BX - MOVQ AX, CX - ORQ R14, CX + MOVQ DI, AX + ORQ R15, AX JE LBB14_6 LBB14_9: - MOVQ R14, CX + MOVQ R15, AX + NOTQ AX + ANDQ DI, AX + LEAQ 0(AX)(AX*1), R8 + ORQ R15, R8 + MOVQ R8, CX NOTQ CX - ANDQ AX, CX - LEAQ 0(CX)(CX*1), R8 - ORQ R14, R8 - MOVQ R8, DI - NOTQ DI - ANDQ AX, DI - MOVQ $-6148914691236517206, AX - ANDQ AX, DI - XORL R14, R14 - ADDQ CX, DI - SETCS R14 - ADDQ DI, DI + ANDQ DI, CX + MOVQ $-6148914691236517206, DI + ANDQ DI, CX + XORL R15, R15 + ADDQ AX, CX + SETCS R15 + ADDQ CX, CX MOVQ $6148914691236517205, AX - XORQ AX, DI - ANDQ R8, DI - NOTQ DI - ANDQ DI, BX + XORQ AX, CX + ANDQ R8, CX + NOTQ CX + ANDQ CX, BX TESTQ BX, BX JE LBB14_7 + JMP LBB14_15 LBB14_10: - BSFQ BX, AX - SUBQ R12, AX + MOVQ R9, R12 + CMPQ R12, $32 + JB LBB14_22 LBB14_11: - ADDQ $8, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - RET - -LBB14_12: - ADDQ R9, SI - CMPQ R15, $32 - JB LBB14_23 - -LBB14_13: LONG $0x066ffac5 // vmovdqu (%rsi), %xmm0 LONG $0x4e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm1 - QUAD $0xfffffe40156ffac5 // vmovdqu $-448(%rip), %xmm2 /* LCPI14_0(%rip) */ - QUAD $0xfffffe481d6ffac5 // vmovdqu $-440(%rip), %xmm3 /* LCPI14_1(%rip) */ + QUAD $0xfffffe43156ffac5 // vmovdqu $-445(%rip), %xmm2 /* LCPI14_0(%rip) */ + QUAD $0xfffffe4b1d6ffac5 // vmovdqu $-437(%rip), %xmm3 /* LCPI14_1(%rip) */ LONG $0xe274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm4 - LONG $0xccd7f9c5 // vpmovmskb %xmm4, %ecx + LONG $0xfcd7f9c5 // vpmovmskb %xmm4, %edi LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 - LONG $0xfad7f9c5 // vpmovmskb %xmm2, %edi + LONG $0xdad7f9c5 // vpmovmskb %xmm2, %ebx LONG $0xc374f9c5 // vpcmpeqb %xmm3, %xmm0, %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax LONG $0xc374f1c5 // vpcmpeqb %xmm3, %xmm1, %xmm0 - LONG $0xd8d7f9c5 
// vpmovmskb %xmm0, %ebx - SHLQ $16, DI + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx SHLQ $16, BX - ORQ BX, AX - JE LBB14_15 - CMPQ DX, $-1 - JE LBB14_20 + SHLQ $16, CX + ORQ CX, AX + JE LBB14_13 + CMPQ R14, $-1 + JE LBB14_19 -LBB14_15: - ORQ CX, DI +LBB14_13: + ORQ DI, BX MOVQ AX, CX - ORQ R14, CX - JNE LBB14_21 + ORQ R15, CX + JNE LBB14_20 + +LBB14_14: + TESTQ BX, BX + JE LBB14_21 + +LBB14_15: + BSFQ BX, AX + SUBQ DX, SI + LEAQ 1(SI)(AX*1), AX LBB14_16: - TESTQ DI, DI - JE LBB14_22 + ADDQ $16, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET -LBB14_17: - BSFQ DI, AX - SUBQ R9, SI - ADDQ SI, AX - ADDQ $1, AX - JMP LBB14_11 +LBB14_18: + MOVQ $-1, R14 + XORL R15, R15 + MOVQ -48(BP), DX + CMPQ R12, $32 + JAE LBB14_11 + JMP LBB14_22 LBB14_19: - ADDQ R9, SI - MOVQ $-1, DX - XORL R14, R14 - CMPQ R15, $32 - JAE LBB14_13 - JMP LBB14_23 - -LBB14_20: - MOVQ SI, BX - SUBQ R9, BX - BSFQ AX, DX - ADDQ BX, DX - MOVQ -48(BP), BX - MOVQ DX, 0(BX) - ORQ CX, DI + MOVQ SI, CX + SUBQ DX, CX + BSFQ AX, R14 + ADDQ CX, R14 + MOVQ -56(BP), CX + MOVQ R14, 0(CX) + ORQ DI, BX MOVQ AX, CX - ORQ R14, CX - JE LBB14_16 + ORQ R15, CX + JE LBB14_14 -LBB14_21: - MOVL R14, CX +LBB14_20: + MOVL R15, CX NOTL CX ANDL AX, CX LEAL 0(CX)(CX*1), R8 - ORL R14, R8 - MOVL R8, BX - NOTL BX - ANDL AX, BX - ANDL $-1431655766, BX - XORL R14, R14 - ADDL CX, BX - SETCS R14 - ADDL BX, BX - XORL $1431655765, BX - ANDL R8, BX - NOTL BX - ANDL BX, DI - TESTQ DI, DI - JNE LBB14_17 + ORL R15, R8 + MOVL R8, DI + NOTL DI + ANDL AX, DI + ANDL $-1431655766, DI + XORL R15, R15 + ADDL CX, DI + SETCS R15 + ADDL DI, DI + XORL $1431655765, DI + ANDL R8, DI + NOTL DI + ANDL DI, BX + TESTQ BX, BX + JNE LBB14_15 -LBB14_22: +LBB14_21: ADDQ $32, SI - ADDQ $-32, R15 - -LBB14_23: - TESTQ R14, R14 - JNE LBB14_38 - TESTQ R15, R15 - JE LBB14_35 + ADDQ $-32, R12 -LBB14_25: - MOVQ R9, DI - NEGQ DI - MOVQ $-1, AX +LBB14_22: + TESTQ R15, R15 + JNE LBB14_33 + MOVQ $-1, AX + TESTQ R12, R12 + JE LBB14_16 -LBB14_26: - XORL CX, CX +LBB14_24: + MOVQ DX, R9 + NOTQ R9 + MOVQ -56(BP), CX -LBB14_27: - MOVBLZX 0(SI)(CX*1), BX +LBB14_25: + LEAQ 1(SI), DI + MOVBLZX 0(SI), BX CMPB BX, $34 - JE LBB14_34 + JE LBB14_32 + LEAQ -1(R12), R10 CMPB BX, $92 - JE LBB14_30 - ADDQ $1, CX - CMPQ R15, CX - JNE LBB14_27 - JMP LBB14_36 - -LBB14_30: - LEAQ -1(R15), BX - CMPQ BX, CX - JE LBB14_11 - CMPQ DX, $-1 - JNE LBB14_33 - LEAQ 0(DI)(SI*1), DX - ADDQ CX, DX - MOVQ -48(BP), BX - MOVQ DX, 0(BX) + JE LBB14_28 + MOVQ R10, R12 + MOVQ DI, SI + TESTQ R10, R10 + JNE LBB14_25 + JMP LBB14_16 -LBB14_33: - ADDQ CX, SI - ADDQ $2, SI - MOVQ R15, BX - SUBQ CX, BX - ADDQ $-2, BX - ADDQ $-2, R15 - CMPQ R15, CX - MOVQ BX, R15 - JNE LBB14_26 - JMP LBB14_11 - -LBB14_34: - ADDQ CX, SI - ADDQ $1, SI - -LBB14_35: - SUBQ R9, SI - MOVQ SI, AX - JMP LBB14_11 +LBB14_28: + TESTQ R10, R10 + JE LBB14_16 + CMPQ R14, $-1 + JNE LBB14_31 + ADDQ R9, DI + MOVQ DI, 0(CX) + MOVQ DI, R14 -LBB14_36: - MOVQ $-1, AX - CMPB BX, $34 - JNE LBB14_11 - ADDQ CX, SI - JMP LBB14_35 +LBB14_31: + ADDQ $2, SI + ADDQ $-2, R12 + MOVQ R12, R10 + MOVQ -48(BP), DX + TESTQ R10, R10 + JNE LBB14_25 + JMP LBB14_16 -LBB14_38: - TESTQ R15, R15 - JE LBB14_18 - CMPQ DX, $-1 - JNE LBB14_41 - MOVQ R9, DX - NOTQ DX - ADDQ SI, DX - MOVQ -48(BP), AX - MOVQ DX, 0(AX) +LBB14_32: + SUBQ DX, DI + MOVQ DI, AX + JMP LBB14_16 -LBB14_41: - ADDQ $1, SI - ADDQ $-1, R15 - TESTQ R15, R15 - JNE LBB14_25 - JMP LBB14_35 +LBB14_33: + TESTQ R12, R12 + JE 
LBB14_17 + CMPQ R14, $-1 + JNE LBB14_36 + MOVQ -48(BP), R14 + NOTQ R14 + ADDQ SI, R14 + MOVQ -56(BP), AX + MOVQ R14, 0(AX) -LBB14_18: +LBB14_36: + INCQ SI + DECQ R12 + MOVQ -48(BP), DX + MOVQ $-1, AX + TESTQ R12, R12 + JNE LBB14_24 + JMP LBB14_16 + +LBB14_17: MOVQ $-1, AX - JMP LBB14_11 + JMP LBB14_16 LCPI15_0: LONG $0x43300000 // .long 1127219200 @@ -4683,14 +4674,14 @@ LCPI15_0: LONG $0x00000000 // .long 0 LCPI15_1: - QUAD $0x4330000000000000 // .quad 0x4330000000000000 - QUAD $0x4530000000000000 // .quad 0x4530000000000000 + QUAD $0x4330000000000000 // .quad 4841369599423283200 + QUAD $0x4530000000000000 // .quad 4985484787499139072 LCPI15_2: - QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 + QUAD $0x430c6bf526340000 // .quad 4831355200913801216 LCPI15_3: - QUAD $0xc30c6bf526340000 // .quad 0xc30c6bf526340000 + QUAD $0xc30c6bf526340000 // .quad -4392016835940974592 _vnumber: BYTE $0x55 // pushq %rbp @@ -4700,517 +4691,491 @@ _vnumber: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $72, SP - MOVQ DX, R14 - MOVQ $0, -64(BP) + SUBQ $56, SP + MOVQ DX, BX + MOVQ SI, R14 + MOVQ $0, -48(BP) MOVQ 0(SI), AX MOVQ 0(DI), R15 MOVQ 8(DI), R13 - MOVQ 32(DX), R11 - MOVQ 40(DX), BX + MOVQ 32(DX), R10 + MOVQ 40(DX), R11 MOVQ $9, 0(DX) LONG $0xc057f9c5 // vxorpd %xmm0, %xmm0, %xmm0 LONG $0x4211f9c5; BYTE $0x08 // vmovupd %xmm0, $8(%rdx) MOVQ 0(SI), CX MOVQ CX, 24(DX) CMPQ AX, R13 - JAE LBB15_6 + JAE LBB15_52 MOVB 0(R15)(AX*1), DI - MOVL $1, R10 + MOVL $1, R9 CMPB DI, $45 JNE LBB15_4 - ADDQ $1, AX + INCQ AX CMPQ AX, R13 - JAE LBB15_6 + JAE LBB15_52 MOVB 0(R15)(AX*1), DI - MOVL $-1, R10 + MOVL $-1, R9 LBB15_4: LEAL -48(DI), CX CMPB CX, $10 - JB LBB15_9 - MOVQ AX, 0(SI) - MOVQ $-2, 0(R14) - JMP LBB15_8 - -LBB15_6: - MOVQ R13, 0(SI) - -LBB15_7: - MOVQ $-1, 0(R14) + JB LBB15_6 -LBB15_8: - ADDQ $72, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - RET +LBB15_5: + MOVQ AX, 0(R14) + MOVQ $-2, 0(BX) + JMP LBB15_53 -LBB15_9: +LBB15_6: CMPB DI, $48 - JNE LBB15_13 - LEAQ 1(AX), R9 + JNE LBB15_10 + LEAQ 1(AX), SI CMPQ AX, R13 - JAE LBB15_22 - MOVB 0(R15)(R9*1), DX + JAE LBB15_19 + MOVB 0(R15)(SI*1), DX ADDB $-46, DX CMPB DX, $55 - JA LBB15_22 + JA LBB15_19 MOVBLZX DX, R8 MOVQ $36028797027352577, DX BTQ R8, DX - JAE LBB15_22 + JAE LBB15_19 -LBB15_13: +LBB15_10: CMPQ AX, R13 - MOVQ R11, -104(BP) - MOVQ BX, -96(BP) - MOVQ SI, -56(BP) - JAE LBB15_21 + JAE LBB15_18 CMPB CX, $9 - MOVL R10, -68(BP) - MOVB $1, CX - JA LBB15_23 - MOVL CX, -44(BP) - MOVL $4294967248, R9 - LEAQ -1(R13), R10 + JA LBB15_20 + LEAQ -1(R13), R8 XORL CX, CX - XORL R8, R8 + XORL SI, SI XORL R12, R12 -LBB15_16: - CMPL R8, $18 - JG LBB15_18 +LBB15_13: + CMPL SI, $18 + JG LBB15_15 + MOVBQSX DI, DI LEAQ 0(R12)(R12*4), DX - MOVBLSX DI, DI - ADDL R9, DI - LEAQ 0(DI)(DX*2), R12 - ADDL $1, R8 - JMP LBB15_19 + LEAQ -48(DI)(DX*2), R12 + INCL SI + JMP LBB15_16 -LBB15_18: - ADDL $1, CX +LBB15_15: + INCL CX -LBB15_19: - CMPQ R10, AX - JE LBB15_28 +LBB15_16: + CMPQ R8, AX + JE LBB15_24 MOVBLZX 1(R15)(AX*1), DI - ADDQ $1, AX + INCQ AX LEAL -48(DI), DX CMPB DX, $10 - JB LBB15_16 - JMP LBB15_24 + JB LBB15_13 + JMP LBB15_21 -LBB15_21: - MOVB $1, CX - MOVL CX, -44(BP) - XORL CX, CX - XORL R8, R8 - XORL R12, R12 - XORL R11, R11 - TESTL CX, CX - SETGT R11 - JE LBB15_30 - JMP LBB15_38 +LBB15_18: + XORL CX, CX + XORL SI, SI + XORL R12, R12 + JMP LBB15_25 -LBB15_22: - MOVQ R9, 0(SI) - JMP LBB15_8 +LBB15_19: + 
MOVQ SI, 0(R14) + JMP LBB15_53 -LBB15_23: - MOVL CX, -44(BP) +LBB15_20: XORL R12, R12 - XORL R8, R8 + XORL SI, SI XORL CX, CX -LBB15_24: - CMPB DI, $46 - JNE LBB15_29 - ADDQ $1, AX - MOVQ $8, 0(R14) - CMPQ AX, R13 - JAE LBB15_67 - MOVB 0(R15)(AX*1), DX - ADDB $-48, DX - CMPB DX, $10 - JAE LBB15_89 - MOVL $0, -44(BP) - JMP LBB15_29 +LBB15_21: + XORL DX, DX + TESTL CX, CX + SETGT DX + MOVL DX, -52(BP) + MOVL $9, R8 + CMPB DI, $46 + JNE LBB15_26 + INCQ AX + MOVQ $8, 0(BX) + CMPQ AX, R13 + JAE LBB15_52 + MOVB 0(R15)(AX*1), DX + ADDB $-48, DX + MOVL $8, R8 + CMPB DX, $10 + JAE LBB15_5 + JMP LBB15_26 -LBB15_28: +LBB15_24: MOVQ R13, AX -LBB15_29: - MOVL -68(BP), R10 - XORL R11, R11 +LBB15_25: + XORL DX, DX TESTL CX, CX - SETGT R11 - JNE LBB15_38 + SETGT DX + MOVL DX, -52(BP) + MOVL $9, R8 -LBB15_30: +LBB15_26: + TESTL CX, CX + JNE LBB15_35 TESTQ R12, R12 - JNE LBB15_38 + JNE LBB15_35 CMPQ AX, R13 - JAE LBB15_36 - MOVL AX, SI - SUBL R13, SI - XORL R8, R8 + JAE LBB15_33 + MOVL AX, DI + SUBL R13, DI + XORL SI, SI XORL CX, CX -LBB15_33: +LBB15_30: CMPB 0(R15)(AX*1), $48 - JNE LBB15_37 - ADDQ $1, AX - ADDL $-1, CX + JNE LBB15_34 + INCQ AX + DECL CX CMPQ R13, AX - JNE LBB15_33 + JNE LBB15_30 XORL R12, R12 - CMPB -44(BP), $0 - JNE LBB15_57 - JMP LBB15_61 + CMPL R8, $9 + JE LBB15_55 + JMP LBB15_59 -LBB15_36: +LBB15_33: XORL CX, CX - XORL R8, R8 + XORL SI, SI -LBB15_37: +LBB15_34: XORL R12, R12 -LBB15_38: +LBB15_35: CMPQ AX, R13 - JAE LBB15_44 - CMPL R8, $18 - JG LBB15_44 - MOVL $4294967248, R9 + JAE LBB15_40 + CMPL SI, $18 + JG LBB15_40 -LBB15_41: +LBB15_37: MOVBLZX 0(R15)(AX*1), DI LEAL -48(DI), DX CMPB DX, $9 - JA LBB15_44 + JA LBB15_40 LEAQ 0(R12)(R12*4), DX - ADDL R9, DI - LEAQ 0(DI)(DX*2), R12 - ADDL $-1, CX - ADDQ $1, AX + LEAQ -48(DI)(DX*2), R12 + DECL CX + INCQ AX CMPQ AX, R13 - JAE LBB15_44 - LEAL 1(R8), DX - CMPL R8, $18 - MOVL DX, R8 - JL LBB15_41 + JAE LBB15_40 + LEAL 1(SI), DX + CMPL SI, $18 + MOVL DX, SI + JL LBB15_37 -LBB15_44: +LBB15_40: CMPQ AX, R13 - JAE LBB15_56 - MOVB 0(R15)(AX*1), DX - LEAL -48(DX), SI - CMPB SI, $9 - JA LBB15_50 - LEAQ -1(R13), SI - -LBB15_47: - CMPQ SI, AX - JE LBB15_68 - MOVBLZX 1(R15)(AX*1), DX - ADDQ $1, AX - LEAL -48(DX), DI - CMPB DI, $9 - JBE LBB15_47 - MOVL $1, R11 - -LBB15_50: - ORB $32, DX - CMPB DX, $101 - JNE LBB15_56 - LEAQ 1(AX), DX - MOVQ $8, 0(R14) - CMPQ DX, R13 - JAE LBB15_67 - MOVL R11, -44(BP) - MOVB 0(R15)(DX*1), SI + JAE LBB15_54 + MOVB 0(R15)(AX*1), SI + LEAL -48(SI), DX + CMPB DX, $9 + JA LBB15_46 + LEAQ -1(R13), DI + +LBB15_43: + CMPQ DI, AX + JE LBB15_76 + MOVBLZX 1(R15)(AX*1), SI + INCQ AX + LEAL -48(SI), DX + CMPB DX, $9 + JBE LBB15_43 + MOVL $1, -52(BP) + +LBB15_46: + ORB $32, SI + CMPB SI, $101 + JNE LBB15_54 + LEAQ 1(AX), DI + MOVQ $8, 0(BX) + CMPQ DI, R13 + JAE LBB15_52 + MOVB 0(R15)(DI*1), SI CMPB SI, $45 - JE LBB15_54 + JE LBB15_50 MOVL $1, R8 CMPB SI, $43 - JNE LBB15_87 + JNE LBB15_85 -LBB15_54: +LBB15_50: ADDQ $2, AX CMPQ AX, R13 - JAE LBB15_67 + JAE LBB15_52 XORL DX, DX CMPB SI, $43 SETEQ DX - LEAL 0(DX)(DX*1), R8 - ADDL $-1, R8 + LEAL -1(DX)(DX*1), R8 MOVB 0(R15)(AX*1), SI - JMP LBB15_88 + JMP LBB15_86 -LBB15_56: - MOVL CX, SI +LBB15_52: + MOVQ R13, 0(R14) + MOVQ $-1, 0(BX) + +LBB15_53: + ADDQ $56, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET + +LBB15_54: + MOVL CX, DI MOVQ AX, R13 - CMPB -44(BP), $0 - JE LBB15_61 + CMPL R8, $9 + JNE LBB15_59 -LBB15_57: - TESTL SI, SI - JNE LBB15_60 +LBB15_55: + TESTL 
DI, DI + JNE LBB15_58 MOVQ $-9223372036854775808, AX - MOVLQSX R10, CX + MOVLQSX R9, CX TESTQ R12, R12 - JNS LBB15_71 + JNS LBB15_80 MOVQ R12, DX ANDQ CX, DX CMPQ DX, AX - JE LBB15_71 + JE LBB15_80 + +LBB15_58: + MOVQ $8, 0(BX) + +LBB15_59: + MOVQ $0, -64(BP) + LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 + QUAD $0xfffffcf60562f9c5 // vpunpckldq $-778(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ + QUAD $0xfffffcfe055cf9c5 // vsubpd $-770(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ + LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 + LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 + LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) + MOVQ R12, AX + SHRQ $52, AX + JE LBB15_71 LBB15_60: - MOVQ $8, 0(R14) - -LBB15_61: - MOVQ $0, -80(BP) - LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 - QUAD $0xfffffcae0562f9c5 // vpunpckldq $-850(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ - QUAD $0xfffffcb6055cf9c5 // vsubpd $-842(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ - LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 - LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - MOVQ R12, AX - SHRQ $52, AX - JNE LBB15_76 - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - MOVL R10, AX - SHRL $31, AX - SHLQ $63, AX - ORQ CX, AX - MOVQ AX, -64(BP) - TESTQ R12, R12 - JE LBB15_83 - TESTL SI, SI - JE LBB15_83 - LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 - LEAL -1(SI), AX - CMPL AX, $36 - JA LBB15_69 - CMPL SI, $23 - JL LBB15_72 - LEAL -22(SI), AX - LONG $0x390d8d48; WORD $0x00be; BYTE $0x00 // leaq $48697(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - MOVL $22, AX - JMP LBB15_73 + MOVQ R11, -80(BP) + MOVQ R10, -88(BP) + LEAQ -48(BP), CX + MOVQ DI, SI + MOVQ R12, DI + MOVQ SI, -72(BP) + MOVL R9, DX + MOVL R9, -56(BP) + LONG $0xffeab1e8; BYTE $0xff // callq _atof_eisel_lemire64 + TESTB AX, AX + JE LBB15_64 + MOVQ -72(BP), SI + MOVL -56(BP), DX + CMPL -52(BP), $0 + JE LBB15_79 + INCQ R12 + LEAQ -64(BP), CX + MOVQ R12, DI + LONG $0xffea8de8; BYTE $0xff // callq _atof_eisel_lemire64 + TESTB AX, AX + JE LBB15_64 + LONG $0x4d10fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm1 + LONG $0x4510fbc5; BYTE $0xd0 // vmovsd $-48(%rbp), %xmm0 + LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 + JNE LBB15_64 + JNP LBB15_66 -LBB15_67: - MOVQ -56(BP), AX - MOVQ R13, 0(AX) - JMP LBB15_7 +LBB15_64: + MOVQ 0(R14), AX + ADDQ AX, R15 + MOVQ R13, SI + SUBQ AX, SI + MOVQ R15, DI + MOVQ -88(BP), DX + MOVQ -80(BP), CX + LONG $0xffef39e8; BYTE $0xff // callq _atof_native -LBB15_68: - MOVL $1, R11 - MOVL CX, SI - CMPB -44(BP), $0 - JNE LBB15_57 - JMP LBB15_61 +LBB15_65: + LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) + +LBB15_66: + LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + +LBB15_67: + MOVQ $-9223372036854775808, CX + DECQ CX + ANDQ AX, CX + MOVQ $9218868437227405312, DX + CMPQ CX, DX + JNE LBB15_69 + MOVQ $-8, 0(BX) LBB15_69: - CMPL SI, $-22 - JB LBB15_76 - NEGL SI - LONG $0xf0058d48; WORD $0x00bd; BYTE $0x00 // leaq $48624(%rip), %rax /* _P10_TAB(%rip) */ - LONG $0x045efbc5; BYTE $0xf0 // vdivsd (%rax,%rsi,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - JMP LBB15_80 + MOVQ AX, 8(BX) + +LBB15_70: + MOVQ R13, 0(R14) + JMP LBB15_53 LBB15_71: + LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx + MOVL R9, AX + SHRL $31, AX + SHLQ $63, AX + ORQ CX, AX + MOVQ AX, -48(BP) + TESTQ R12, R12 + JE LBB15_67 + TESTL 
DI, DI + JE LBB15_67 + LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 + LEAL -1(DI), AX + CMPL AX, $36 + JA LBB15_77 + CMPL DI, $23 + JL LBB15_81 + MOVLQSX DI, AX + LONG $0xb40d8d48; WORD $0x00bd; BYTE $0x00 // leaq $48564(%rip), %rcx /* _P10_TAB(%rip) */ + QUAD $0xffff50c18459fbc5; BYTE $0xff // vmulsd $-176(%rcx,%rax,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) + MOVL $22, AX + JMP LBB15_82 + +LBB15_76: + MOVL $1, -52(BP) + MOVL CX, DI + CMPL R8, $9 + JE LBB15_55 + JMP LBB15_59 + +LBB15_77: + CMPL DI, $-22 + JB LBB15_60 + NEGL DI + MOVLQSX DI, AX + LONG $0x720d8d48; WORD $0x00bd; BYTE $0x00 // leaq $48498(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x045efbc5; BYTE $0xc1 // vdivsd (%rcx,%rax,8), %xmm0, %xmm0 + JMP LBB15_65 + +LBB15_79: + MOVQ -48(BP), AX + JMP LBB15_67 + +LBB15_80: LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 IMULQ CX, R12 - QUAD $0xfffffbda0562f9c5 // vpunpckldq $-1062(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ - QUAD $0xfffffbe2055cf9c5 // vsubpd $-1054(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ - MOVQ R12, 16(R14) + QUAD $0xfffffb790562f9c5 // vpunpckldq $-1159(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ + QUAD $0xfffffb81055cf9c5 // vsubpd $-1151(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ + MOVQ R12, 16(BX) LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 ANDQ CX, AX LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx ORQ AX, CX - MOVQ CX, 8(R14) - MOVQ -56(BP), AX - MOVQ R13, 0(AX) - JMP LBB15_8 + MOVQ CX, 8(BX) + JMP LBB15_70 -LBB15_72: - MOVL SI, AX +LBB15_81: + MOVL DI, AX -LBB15_73: - QUAD $0xfffffbbf052ef9c5 // vucomisd $-1089(%rip), %xmm0 /* LCPI15_2(%rip) */ - JA LBB15_76 - QUAD $0xfffffbbd0d10fbc5 // vmovsd $-1091(%rip), %xmm1 /* LCPI15_3(%rip) */ +LBB15_82: + QUAD $0xfffffb65052ef9c5 // vucomisd $-1179(%rip), %xmm0 /* LCPI15_2(%rip) */ + JA LBB15_60 + QUAD $0xfffffb5f0d10fbc5 // vmovsd $-1185(%rip), %xmm1 /* LCPI15_3(%rip) */ LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JA LBB15_76 + JA LBB15_60 MOVL AX, AX - LONG $0x7c0d8d48; WORD $0x00bd; BYTE $0x00 // leaq $48508(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0xf90d8d48; WORD $0x00bc; BYTE $0x00 // leaq $48377(%rip), %rcx /* _P10_TAB(%rip) */ LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - JMP LBB15_80 - -LBB15_76: - MOVL R11, -44(BP) - LEAQ -64(BP), CX - MOVQ R12, DI - MOVL R10, DX - MOVL R10, BX - MOVQ SI, -88(BP) - LONG $0xffea1ae8; BYTE $0xff // callq _atof_eisel_lemire64 - TESTB AX, AX - JE LBB15_81 - MOVQ -88(BP), SI - CMPL -44(BP), $0 - JE LBB15_82 - MOVL BX, DX - ADDQ $1, R12 - LEAQ -80(BP), CX - MOVQ R12, DI - LONG $0xffe9fae8; BYTE $0xff // callq _atof_eisel_lemire64 - TESTB AX, AX - JE LBB15_81 - LONG $0x4d10fbc5; BYTE $0xb0 // vmovsd $-80(%rbp), %xmm1 - LONG $0x4510fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm0 - LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JNE LBB15_81 - JP LBB15_81 - -LBB15_80: - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - JMP LBB15_83 - -LBB15_81: - MOVQ -56(BP), BX - MOVQ 0(BX), AX - ADDQ AX, R15 - MOVQ R13, SI - SUBQ AX, SI - MOVQ R15, DI - MOVQ -104(BP), DX - MOVQ -96(BP), CX - LONG $0xffedf7e8; BYTE $0xff // callq _atof_native - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - JMP LBB15_84 - -LBB15_82: - MOVQ -64(BP), AX + JMP LBB15_65 -LBB15_83: - MOVQ -56(BP), BX - LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 - -LBB15_84: - MOVQ 
$-9223372036854775808, CX - ADDQ $-1, CX - ANDQ AX, CX - MOVQ $9218868437227405312, AX - CMPQ CX, AX - JNE LBB15_86 - MOVQ $-8, 0(R14) +LBB15_85: + MOVQ DI, AX LBB15_86: - LONG $0x117bc1c4; WORD $0x0846 // vmovsd %xmm0, $8(%r14) - MOVQ R13, 0(BX) - JMP LBB15_8 - -LBB15_87: - MOVQ DX, AX - -LBB15_88: LEAL -48(SI), DI CMPB DI, $9 - JBE LBB15_90 - -LBB15_89: - MOVQ -56(BP), CX - MOVQ AX, 0(CX) - MOVQ $-2, 0(R14) - JMP LBB15_8 - -LBB15_90: + JA LBB15_5 + MOVL R9, -56(BP) CMPQ AX, R13 - JAE LBB15_96 + JAE LBB15_93 CMPB DI, $9 - JA LBB15_96 + JA LBB15_93 LEAQ -1(R13), R9 XORL DI, DI -LBB15_93: +LBB15_90: MOVL DI, DX - MOVBLZX SI, SI CMPL DI, $10000 LEAL 0(DX)(DX*4), DI + MOVBLZX SI, SI LEAL -48(SI)(DI*2), DI WORD $0x4d0f; BYTE $0xfa // cmovgel %edx, %edi CMPQ R9, AX - JE LBB15_97 + JE LBB15_94 MOVBLZX 1(R15)(AX*1), SI - ADDQ $1, AX + INCQ AX LEAL -48(SI), DX CMPB DX, $10 - JB LBB15_93 - JMP LBB15_98 + JB LBB15_90 + JMP LBB15_95 -LBB15_96: +LBB15_93: XORL DI, DI - JMP LBB15_98 + JMP LBB15_95 -LBB15_97: +LBB15_94: MOVQ R13, AX -LBB15_98: - MOVQ DI, SI - IMULL R8, SI - ADDL CX, SI +LBB15_95: + IMULL R8, DI + ADDL CX, DI MOVQ AX, R13 - MOVL -44(BP), R11 - JMP LBB15_61 + MOVL -56(BP), R9 + JMP LBB15_59 _vsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp BYTE $0x53 // pushq %rbx - MOVQ 0(SI), AX + MOVQ 0(SI), BX MOVQ 0(DI), R8 - MOVQ 8(DI), R11 + MOVQ 8(DI), R10 MOVQ $9, 0(DX) LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4211f8c5; BYTE $0x08 // vmovups %xmm0, $8(%rdx) MOVQ 0(SI), CX MOVQ CX, 24(DX) - CMPQ AX, R11 + CMPQ BX, R10 JAE LBB16_1 - MOVB 0(R8)(AX*1), CX + MOVB 0(R8)(BX*1), CX MOVL $1, R9 CMPB CX, $45 JNE LBB16_5 - ADDQ $1, AX - CMPQ AX, R11 + INCQ BX + CMPQ BX, R10 JAE LBB16_1 - MOVB 0(R8)(AX*1), CX + MOVB 0(R8)(BX*1), CX MOVQ $-1, R9 LBB16_5: LEAL -48(CX), DI CMPB DI, $10 JB LBB16_7 - MOVQ AX, 0(SI) + MOVQ BX, 0(SI) MOVQ $-2, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB16_1: - MOVQ R11, 0(SI) + MOVQ R10, 0(SI) MOVQ $-1, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp @@ -5218,76 +5183,68 @@ LBB16_1: LBB16_7: CMPB CX, $48 - JNE LBB16_12 - LEAQ 1(AX), DI - CMPQ AX, R11 - JAE LBB16_11 + JNE LBB16_8 + LEAQ 1(BX), DI + CMPQ BX, R10 + JAE LBB16_17 MOVB 0(R8)(DI*1), CX ADDB $-46, CX CMPB CX, $55 - JA LBB16_11 - MOVBLZX CX, R10 + JA LBB16_17 + MOVBLZX CX, R11 MOVQ $36028797027352577, CX - BTQ R10, CX - JAE LBB16_11 + BTQ R11, CX + JAE LBB16_17 -LBB16_12: - CMPQ AX, R11 - MOVQ R11, R10 - LONG $0xd0470f4c // cmovaq %rax, %r10 +LBB16_8: XORL DI, DI -LBB16_13: - CMPQ R10, AX - JE LBB16_23 - MOVBQSX 0(R8)(AX*1), CX - LEAL -48(CX), BX - CMPB BX, $9 +LBB16_9: + CMPQ BX, R10 + JAE LBB16_22 + MOVBQSX 0(R8)(BX*1), CX + LEAL -48(CX), AX + CMPB AX, $9 JA LBB16_18 IMUL3Q $10, DI, DI - JO LBB16_17 - ADDQ $1, AX - ADDL $-48, CX + JO LBB16_13 + INCQ BX + ADDQ $-48, CX IMULQ R9, CX ADDQ CX, DI - JNO LBB16_13 + JNO LBB16_9 -LBB16_17: - ADDQ $-1, AX - MOVQ AX, 0(SI) +LBB16_13: + DECQ BX + MOVQ BX, 0(SI) MOVQ $-5, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_11: +LBB16_17: MOVQ DI, 0(SI) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB16_18: - CMPQ AX, R11 - JAE LBB16_22 - CMPB CX, $46 - JE LBB16_25 - CMPB CX, $69 - JE LBB16_25 CMPB CX, $101 + JE LBB16_21 + CMPB CX, $69 + JE LBB16_21 + CMPB CX, $46 JNE LBB16_22 -LBB16_25: - MOVQ AX, 0(SI) +LBB16_21: + MOVQ BX, 0(SI) MOVQ $-6, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB16_22: - MOVQ AX, R10 - -LBB16_23: - MOVQ R10, 0(SI) + MOVQ BX, 0(SI) MOVQ DI, 16(DX) BYTE $0x5b // 
popq %rbx BYTE $0x5d // popq %rbp @@ -5296,7 +5253,6 @@ LBB16_23: _vunsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - BYTE $0x53 // pushq %rbx MOVQ DX, R8 MOVQ 0(SI), CX MOVQ 0(DI), R9 @@ -5315,14 +5271,12 @@ _vunsigned: LBB17_3: MOVQ CX, 0(SI) MOVQ $-6, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB17_1: MOVQ R11, 0(SI) MOVQ $-1, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -5332,7 +5286,6 @@ LBB17_4: JB LBB17_6 MOVQ CX, 0(SI) MOVQ $-2, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -5353,58 +5306,53 @@ LBB17_7: MOVL $10, R10 LBB17_8: - CMPQ R11, CX - JE LBB17_22 + CMPQ CX, R11 + JAE LBB17_20 MOVBLSX 0(R9)(CX*1), DI LEAL -48(DI), DX CMPB DX, $9 JA LBB17_17 MULQ R10 JO LBB17_13 - ADDQ $1, CX + INCQ CX ADDL $-48, DI - XORL BX, BX - ADDQ DI, AX - SETCS BX - MOVQ BX, DX + MOVLQSX DI, DX + MOVQ DX, DI + SARQ $63, DI + ADDQ DX, AX + ADCQ $0, DI + MOVL DI, DX + ANDL $1, DX NEGQ DX - XORQ DX, BX + XORQ DX, DI JNE LBB17_13 TESTQ DX, DX JNS LBB17_8 LBB17_13: - ADDQ $-1, CX + DECQ CX MOVQ CX, 0(SI) MOVQ $-5, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB17_17: - CMPQ CX, R11 - JAE LBB17_21 - CMPB DI, $46 + CMPB DI, $101 JE LBB17_3 CMPB DI, $69 JE LBB17_3 - CMPB DI, $101 + CMPB DI, $46 JE LBB17_3 -LBB17_21: - MOVQ CX, R11 - -LBB17_22: - MOVQ R11, 0(SI) +LBB17_20: + MOVQ CX, 0(SI) MOVQ AX, 16(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB17_16: - ADDQ $1, CX + INCQ CX MOVQ CX, 0(SI) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -5429,10 +5377,10 @@ _fsm_exec: WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx SUBQ $40, SP - MOVL CX, -60(BP) CMPL 0(DI), $0 JE LBB19_2 MOVQ DI, R12 + MOVL CX, -60(BP) MOVQ SI, -48(BP) MOVQ DX, -56(BP) MOVQ $-1, R14 @@ -5440,19 +5388,19 @@ _fsm_exec: LBB19_2: MOVQ $-1, R13 - JMP LBB19_65 + JMP LBB19_61 LBB19_3: LEAQ 3(AX), CX MOVQ CX, 0(BX) TESTQ AX, AX - JLE LBB19_62 + JLE LBB19_68 -LBB19_39: +LBB19_40: MOVL 0(R12), CX MOVQ R14, R13 TESTL CX, CX - JE LBB19_65 + JE LBB19_61 LBB19_4: MOVQ -48(BP), R13 @@ -5460,20 +5408,20 @@ LBB19_4: MOVQ 8(R13), SI MOVQ -56(BP), BX MOVQ BX, DX - LONG $0xfff1bde8; BYTE $0xff // callq _advance_ns + LONG $0xfff220e8; BYTE $0xff // callq _advance_ns MOVLQSX 0(R12), DX LEAQ -1(DX), CX + MOVL 0(R12)(DX*4), SI CMPQ R14, $-1 JNE LBB19_6 MOVQ 0(BX), R14 - ADDQ $-1, R14 + DECQ R14 LBB19_6: - MOVL 0(R12)(DX*4), SI - ADDL $-1, SI + DECL SI CMPL SI, $5 JA LBB19_11 - LONG $0x7c3d8d48; WORD $0x0004; BYTE $0x00 // leaq $1148(%rip), %rdi /* LJTI19_0(%rip) */ + LONG $0xaf3d8d48; WORD $0x0004; BYTE $0x00 // leaq $1199(%rip), %rdi /* LJTI19_0(%rip) */ MOVLQSX 0(DI)(SI*4), SI ADDQ DI, SI JMP SI @@ -5483,12 +5431,12 @@ LBB19_8: CMPL AX, $44 JE LBB19_29 CMPL AX, $93 - JNE LBB19_64 + JNE LBB19_59 MOVL CX, 0(R12) MOVQ R14, R13 TESTL CX, CX JNE LBB19_4 - JMP LBB19_65 + JMP LBB19_61 LBB19_11: MOVL CX, 0(R12) @@ -5502,16 +5450,16 @@ LBB19_12: CMPL AX, $44 JE LBB19_31 CMPL AX, $125 - JNE LBB19_64 + JNE LBB19_59 MOVL CX, 0(R12) MOVQ R14, R13 TESTL CX, CX JNE LBB19_4 - JMP LBB19_65 + JMP LBB19_61 LBB19_15: CMPB AX, $34 - JNE LBB19_64 + JNE LBB19_59 MOVL $4, 0(R12)(DX*4) MOVQ 0(BX), R15 MOVQ R13, DI @@ -5519,20 +5467,20 @@ LBB19_15: LBB19_17: MOVQ R15, SI LEAQ -72(BP), DX - LONG $0xfff374e8; BYTE $0xff // callq _advance_string + LONG $0xfff412e8; BYTE $0xff // callq _advance_string MOVQ AX, R13 TESTQ AX, AX - JS LBB19_56 + JS LBB19_62 MOVQ R13, 0(BX) TESTQ R15, R15 - JG LBB19_39 - JMP LBB19_57 + JG LBB19_40 + JMP LBB19_63 LBB19_19: CMPB AX, $58 - 
JNE LBB19_64 + JNE LBB19_59 MOVL $0, 0(R12)(DX*4) - JMP LBB19_39 + JMP LBB19_40 LBB19_21: CMPB AX, $93 @@ -5541,19 +5489,19 @@ LBB19_21: MOVQ R14, R13 TESTL CX, CX JNE LBB19_4 - JMP LBB19_65 + JMP LBB19_61 LBB19_23: MOVBLSX AX, AX CMPL AX, $34 JE LBB19_33 CMPL AX, $125 - JNE LBB19_64 + JNE LBB19_59 MOVL CX, 0(R12) MOVQ R14, R13 TESTL CX, CX JNE LBB19_4 - JMP LBB19_65 + JMP LBB19_61 LBB19_26: MOVL $1, 0(R12)(DX*4) @@ -5563,7 +5511,7 @@ LBB19_26: LBB19_27: MOVQ $-1, R13 - LONG $0x710d8d48; WORD $0x0003; BYTE $0x00 // leaq $881(%rip), %rcx /* LJTI19_1(%rip) */ + LONG $0xa40d8d48; WORD $0x0003; BYTE $0x00 // leaq $932(%rip), %rcx /* LJTI19_1(%rip) */ MOVLQSX 0(CX)(AX*4), AX ADDQ CX, AX JMP AX @@ -5577,80 +5525,85 @@ LBB19_28: ADDQ R13, DI MOVQ 8(AX), SI SUBQ R13, SI - LONG $0x000cebe8; BYTE $0x00 // callq _do_skip_number - LEAQ -1(AX), CX - MOVQ $-2, DX - SUBQ AX, DX + LONG $0x000d1ce8; BYTE $0x00 // callq _do_skip_number + MOVQ $-2, CX + SUBQ AX, CX TESTQ AX, AX - LONG $0xd1490f48 // cmovnsq %rcx, %rdx - MOVQ $-2, AX - LONG $0xe8480f4c // cmovsq %rax, %r13 - ADDQ R15, DX - MOVQ DX, 0(BX) + LEAQ -1(AX), AX + LONG $0xc1480f48 // cmovsq %rcx, %rax + MOVQ $-2, CX + LONG $0xe9480f4c // cmovsq %rcx, %r13 + ADDQ R15, AX + MOVQ AX, 0(BX) TESTQ R13, R13 - JNS LBB19_39 - JMP LBB19_65 + JNS LBB19_40 + JMP LBB19_61 LBB19_29: CMPL DX, $65535 - JG LBB19_58 + JG LBB19_65 LEAL 1(DX), AX MOVL AX, 0(R12) MOVL $0, 4(R12)(DX*4) - JMP LBB19_39 + JMP LBB19_40 LBB19_31: CMPL DX, $65535 - JG LBB19_58 + JG LBB19_65 LEAL 1(DX), AX MOVL AX, 0(R12) MOVL $3, 4(R12)(DX*4) - JMP LBB19_39 + JMP LBB19_40 LBB19_33: - MOVL $2, 0(R12)(DX*4) - CMPL -60(BP), $0 - JE LBB19_35 - MOVQ R13, DI - MOVQ BX, SI - LONG $0x000564e8; BYTE $0x00 // callq _validate_string - TESTQ AX, AX - JNS LBB19_37 - JMP LBB19_61 - -LBB19_35: + MOVL $2, 0(R12)(DX*4) + MOVL -60(BP), AX + CMPL AX, $1 + JE LBB19_37 + TESTL AX, AX + JNE LBB19_38 + MOVQ -56(BP), BX MOVQ 0(BX), R15 - MOVQ R13, DI + MOVQ -48(BP), DI MOVQ R15, SI LEAQ -72(BP), DX - LONG $0xfff20ae8; BYTE $0xff // callq _advance_string + LONG $0xfff2b2e8; BYTE $0xff // callq _advance_string MOVQ AX, R13 TESTQ AX, AX - JS LBB19_56 + JS LBB19_62 MOVQ R13, 0(BX) TESTQ R15, R15 - JLE LBB19_57 + JG LBB19_38 + JMP LBB19_63 LBB19_37: + MOVQ R13, DI + MOVQ BX, SI + LONG $0x000560e8; BYTE $0x00 // callq _validate_string + TESTQ AX, AX + JS LBB19_69 + +LBB19_38: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_58 + JG LBB19_65 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $4, 4(R12)(AX*4) - JMP LBB19_39 + JMP LBB19_40 -LBB19_40: - CMPL -60(BP), $0 - JE LBB19_55 +LBB19_41: + MOVL -60(BP), AX + CMPL AX, $1 + JE LBB19_57 + TESTL AX, AX + JNE LBB19_40 + MOVQ -56(BP), BX + MOVQ 0(BX), R15 MOVQ -48(BP), DI - MOVQ -56(BP), SI - LONG $0x0004f4e8; BYTE $0x00 // callq _validate_string - TESTQ AX, AX - JNS LBB19_39 - JMP LBB19_61 + JMP LBB19_17 -LBB19_42: +LBB19_44: MOVQ -56(BP), BX MOVQ 0(BX), R13 MOVQ -48(BP), AX @@ -5658,120 +5611,129 @@ LBB19_42: ADDQ R13, DI MOVQ 8(AX), SI SUBQ R13, SI - LONG $0x000bc1e8; BYTE $0x00 // callq _do_skip_number + LONG $0x000be8e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB19_63 + JS LBB19_70 ADDQ R13, AX MOVQ AX, 0(BX) TESTQ R13, R13 - JG LBB19_39 - JMP LBB19_77 + JG LBB19_40 + JMP LBB19_71 -LBB19_44: +LBB19_46: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_58 + JG LBB19_65 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $5, 4(R12)(AX*4) - JMP LBB19_39 + JMP LBB19_40 -LBB19_46: +LBB19_48: MOVQ -56(BP), BX MOVQ 0(BX), AX MOVQ -48(BP), SI MOVQ 8(SI), CX LEAQ -4(CX), DX CMPQ AX, DX - JA LBB19_68 + 
JA LBB19_77 MOVQ 0(SI), CX MOVL 0(CX)(AX*1), DX CMPL DX, $1702063201 - JNE LBB19_78 + JNE LBB19_74 LEAQ 4(AX), CX MOVQ CX, 0(BX) TESTQ AX, AX - JG LBB19_39 - JMP LBB19_62 + JG LBB19_40 + JMP LBB19_68 -LBB19_49: +LBB19_51: MOVQ -56(BP), BX MOVQ 0(BX), AX MOVQ -48(BP), SI MOVQ 8(SI), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB19_68 + JA LBB19_77 MOVQ 0(SI), CX CMPL -1(CX)(AX*1), $1819047278 JE LBB19_3 - JMP LBB19_69 + JMP LBB19_78 -LBB19_51: +LBB19_53: MOVQ -56(BP), BX MOVQ 0(BX), AX MOVQ -48(BP), SI MOVQ 8(SI), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB19_68 + JA LBB19_77 MOVQ 0(SI), CX CMPL -1(CX)(AX*1), $1702195828 JE LBB19_3 - JMP LBB19_73 + JMP LBB19_82 -LBB19_53: +LBB19_55: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_58 + JG LBB19_65 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $6, 4(R12)(AX*4) - JMP LBB19_39 + JMP LBB19_40 -LBB19_55: - MOVQ -56(BP), BX - MOVQ 0(BX), R15 - MOVQ -48(BP), DI - JMP LBB19_17 +LBB19_57: + MOVQ -48(BP), DI + MOVQ -56(BP), SI + LONG $0x0003dae8; BYTE $0x00 // callq _validate_string + TESTQ AX, AX + JNS LBB19_40 + JMP LBB19_69 -LBB19_58: +LBB19_59: + MOVQ 0(BX), AX + MOVQ R13, CX + XORL R13, R13 + +LBB19_60: + CMPQ AX, 8(CX) + SETCS R13 + NOTQ R13 + JMP LBB19_61 + +LBB19_65: MOVQ $-7, R13 - JMP LBB19_65 + JMP LBB19_61 -LBB19_56: +LBB19_62: MOVQ -48(BP), AX MOVQ 8(AX), AX MOVQ AX, 0(BX) - JMP LBB19_65 + JMP LBB19_61 -LBB19_57: - ADDQ $-1, R15 +LBB19_63: + DECQ R15 MOVQ R15, R13 - JMP LBB19_65 + JMP LBB19_61 -LBB19_61: - MOVQ AX, R13 - JMP LBB19_65 +LBB19_64: + MOVQ -56(BP), AX + MOVQ 0(AX), AX + XORL R13, R13 + MOVQ -48(BP), CX + JMP LBB19_60 -LBB19_68: +LBB19_77: MOVQ CX, 0(BX) - JMP LBB19_65 - -LBB19_62: - ADDQ $-1, AX - MOVQ AX, R13 - JMP LBB19_65 + JMP LBB19_61 -LBB19_63: - NOTQ AX - ADDQ AX, R13 - MOVQ R13, 0(BX) +LBB19_68: + DECQ AX -LBB19_64: - MOVQ $-2, R13 +LBB19_69: + MOVQ AX, R13 -LBB19_65: +LBB19_61: MOVQ R13, AX ADDQ $40, SP BYTE $0x5b // popq %rbx @@ -5782,62 +5744,69 @@ LBB19_65: BYTE $0x5d // popq %rbp RET -LBB19_69: - LEAQ -1(AX), DX - MOVQ DX, 0(BX) +LBB19_70: + NOTQ AX + ADDQ AX, R13 + MOVQ R13, 0(BX) MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $110 - JNE LBB19_65 - MOVL $1819047278, DX + JMP LBB19_61 LBB19_71: + DECQ R13 + JMP LBB19_61 + +LBB19_74: + MOVQ $-2, R13 + CMPB DX, $97 + JNE LBB19_61 + INCQ AX + MOVL $1702063201, DX + +LBB19_76: SHRL $8, DX MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - ADDQ $1, AX + INCQ AX CMPL DI, SI - JE LBB19_71 - JMP LBB19_65 + JE LBB19_76 + JMP LBB19_61 -LBB19_73: +LBB19_78: LEAQ -1(AX), DX MOVQ DX, 0(BX) MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $116 - JNE LBB19_65 - MOVL $1702195828, DX + CMPB -1(CX)(AX*1), $110 + JNE LBB19_61 + MOVL $1819047278, DX -LBB19_75: +LBB19_80: SHRL $8, DX MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - ADDQ $1, AX + INCQ AX CMPL DI, SI - JE LBB19_75 - JMP LBB19_65 - -LBB19_77: - ADDQ $-1, R13 - JMP LBB19_65 + JE LBB19_80 + JMP LBB19_61 -LBB19_78: +LBB19_82: + LEAQ -1(AX), DX + MOVQ DX, 0(BX) MOVQ $-2, R13 - CMPB DX, $97 - JNE LBB19_65 - ADDQ $1, AX - MOVL $1702063201, DX + CMPB -1(CX)(AX*1), $116 + JNE LBB19_61 + MOVL $1702195828, DX -LBB19_80: +LBB19_84: SHRL $8, DX MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - ADDQ $1, AX + INCQ AX CMPL DI, SI - JE LBB19_80 - JMP LBB19_65 + JE LBB19_84 + JMP LBB19_61 // .set L19_0_set_8, LBB19_8-LJTI19_0 // .set L19_0_set_12, LBB19_12-LJTI19_0 @@ -5846,148 +5815,148 @@ LBB19_80: // .set L19_0_set_21, LBB19_21-LJTI19_0 // .set L19_0_set_23, LBB19_23-LJTI19_0 LJTI19_0: - LONG $0xfffffb8d // .long L19_0_set_8 - LONG $0xfffffbc7 // .long L19_0_set_12 
- LONG $0xfffffbf0 // .long L19_0_set_15 - LONG $0xfffffc2f // .long L19_0_set_19 - LONG $0xfffffc44 // .long L19_0_set_21 - LONG $0xfffffc5c // .long L19_0_set_23 - - // .set L19_1_set_65, LBB19_65-LJTI19_1 + LONG $0xfffffb5a // .long L19_0_set_8 + LONG $0xfffffb94 // .long L19_0_set_12 + LONG $0xfffffbbd // .long L19_0_set_15 + LONG $0xfffffbfc // .long L19_0_set_19 + LONG $0xfffffc11 // .long L19_0_set_21 + LONG $0xfffffc29 // .long L19_0_set_23 + + // .set L19_1_set_61, LBB19_61-LJTI19_1 // .set L19_1_set_64, LBB19_64-LJTI19_1 - // .set L19_1_set_40, LBB19_40-LJTI19_1 - // .set L19_1_set_42, LBB19_42-LJTI19_1 - // .set L19_1_set_28, LBB19_28-LJTI19_1 + // .set L19_1_set_41, LBB19_41-LJTI19_1 // .set L19_1_set_44, LBB19_44-LJTI19_1 + // .set L19_1_set_28, LBB19_28-LJTI19_1 // .set L19_1_set_46, LBB19_46-LJTI19_1 - // .set L19_1_set_49, LBB19_49-LJTI19_1 + // .set L19_1_set_48, LBB19_48-LJTI19_1 // .set L19_1_set_51, LBB19_51-LJTI19_1 // .set L19_1_set_53, LBB19_53-LJTI19_1 + // .set L19_1_set_55, LBB19_55-LJTI19_1 LJTI19_1: - LONG $0xffffff3b // .long L19_1_set_65 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffda1 // .long L19_1_set_40 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffdc6 // .long L19_1_set_42 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG 
$0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffe00 // .long L19_1_set_44 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffe25 // .long L19_1_set_46 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffe68 // .long L19_1_set_49 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffe9a // .long L19_1_set_51 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xffffff34 // .long L19_1_set_64 - LONG $0xfffffec8 // .long L19_1_set_53 + LONG $0xffffff2b // .long L19_1_set_61 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long 
L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffd79 // .long L19_1_set_41 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffd9d // .long L19_1_set_44 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xfffffc65 // .long L19_1_set_28 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffdd7 // .long L19_1_set_46 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffdfc // .long L19_1_set_48 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG 
$0xfffffe3f // .long L19_1_set_51 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffe71 // .long L19_1_set_53 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xffffff10 // .long L19_1_set_64 + LONG $0xfffffea3 // .long L19_1_set_55 _skip_array: BYTE $0x55 // pushq %rbp @@ -6027,10 +5996,10 @@ _skip_string: MOVQ 0(SI), BX LEAQ -32(BP), DX MOVQ BX, SI - LONG $0xffed15e8; BYTE $0xff // callq _advance_string + LONG $0xffed80e8; BYTE $0xff // callq _advance_string TESTQ AX, AX JS LBB22_2 - ADDQ $-1, BX + DECQ BX MOVQ AX, CX MOVQ BX, AX JMP LBB22_3 @@ -6064,170 +6033,176 @@ _validate_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $24, SP + SUBQ $40, SP MOVQ SI, R14 MOVQ 0(SI), R15 - MOVQ 8(DI), R8 - MOVQ R8, -64(BP) - SUBQ R15, R8 - JE LBB23_52 - MOVQ 0(DI), R13 - LEAQ 0(R15)(R13*1), DI - MOVQ $-1, R11 - XORL R12, R12 - CMPQ R8, $64 - JB LBB23_2 - QUAD $0xffffff85056f7ac5 // vmovdqu $-123(%rip), %xmm8 /* LCPI23_0(%rip) */ - QUAD $0xffffff8d0d6ffac5 // vmovdqu $-115(%rip), %xmm1 /* LCPI23_1(%rip) */ - QUAD $0xffffff95156ffac5 // vmovdqu $-107(%rip), %xmm2 /* LCPI23_2(%rip) */ + MOVQ 8(DI), R12 + MOVQ R12, -64(BP) + SUBQ R15, R12 + JE LBB23_16 + MOVQ R14, -48(BP) + MOVQ 0(DI), AX + MOVQ AX, -56(BP) + LEAQ 0(AX)(R15*1), SI + CMPQ R12, $64 + MOVQ SI, -72(BP) + JB LBB23_31 + MOVL R12, R9 + ANDL $63, R9 + MOVQ $-1, R13 + XORL R14, R14 + QUAD $0xffffff72056f7ac5 // vmovdqu $-142(%rip), %xmm8 /* LCPI23_0(%rip) */ + QUAD $0xffffff7a0d6ffac5 // vmovdqu $-134(%rip), %xmm1 /* LCPI23_1(%rip) */ + QUAD $0xffffff82156ffac5 // vmovdqu $-126(%rip), %xmm2 /* LCPI23_2(%rip) */ LONG $0xdb76e1c5 // vpcmpeqd %xmm3, %xmm3, %xmm3 - MOVQ R15, DX - -LBB23_4: - LONG $0x6f7ac1c4; WORD $0x157c; BYTE $0x00 // vmovdqu (%r13,%rdx), %xmm7 - LONG $0x6f7ac1c4; WORD $0x1574; BYTE $0x10 // vmovdqu $16(%r13,%rdx), %xmm6 - LONG $0x6f7ac1c4; WORD $0x156c; BYTE $0x20 // vmovdqu $32(%r13,%rdx), %xmm5 - LONG $0x6f7ac1c4; WORD $0x1564; BYTE $0x30 // vmovdqu $48(%r13,%rdx), %xmm4 - LONG $0xc774b9c5 // vpcmpeqb %xmm7, %xmm8, %xmm0 - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - LONG $0xc674b9c5 // vpcmpeqb %xmm6, %xmm8, %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - LONG $0xc574b9c5 // vpcmpeqb %xmm5, %xmm8, %xmm0 - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - LONG $0xc474b9c5 // vpcmpeqb %xmm4, %xmm8, %xmm0 - LONG $0xc8d779c5 // vpmovmskb %xmm0, %r9d - LONG $0xc174c1c5 // vpcmpeqb %xmm1, %xmm7, %xmm0 - LONG $0xf0d7f9c5 // vpmovmskb %xmm0, %esi - LONG $0xc174c9c5 // vpcmpeqb %xmm1, %xmm6, %xmm0 - SHLQ $16, CX - ORQ CX, AX - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - LONG $0xc174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm0 - SHLQ $32, BX - ORQ BX, AX - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - LONG $0xc174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm0 - SHLQ $16, CX - ORQ CX, SI - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - LONG $0xc764e9c5 // vpcmpgtb %xmm7, %xmm2, %xmm0 - LONG $0xfb64c1c5 // vpcmpgtb %xmm3, %xmm7, %xmm7 - LONG $0xc7dbf9c5 // vpand %xmm7, %xmm0, %xmm0 + +LBB23_3: + LONG $0x3e6ffac5 // vmovdqu (%rsi), %xmm7 + LONG $0x766ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm6 + LONG $0x6e6ffac5; BYTE $0x20 // vmovdqu $32(%rsi), %xmm5 + LONG $0x666ffac5; BYTE $0x30 // 
vmovdqu $48(%rsi), %xmm4 + LONG $0xc774b9c5 // vpcmpeqb %xmm7, %xmm8, %xmm0 + LONG $0xd0d7f9c5 // vpmovmskb %xmm0, %edx + LONG $0xc674b9c5 // vpcmpeqb %xmm6, %xmm8, %xmm0 + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + LONG $0xc574b9c5 // vpcmpeqb %xmm5, %xmm8, %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + LONG $0xc474b9c5 // vpcmpeqb %xmm4, %xmm8, %xmm0 + LONG $0xd8d779c5 // vpmovmskb %xmm0, %r11d + LONG $0xc174c1c5 // vpcmpeqb %xmm1, %xmm7, %xmm0 + LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d + LONG $0xc174c9c5 // vpcmpeqb %xmm1, %xmm6, %xmm0 + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + LONG $0xc174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm0 + SHLQ $16, AX + ORQ AX, DX + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + LONG $0xc174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm0 SHLQ $32, BX - ORQ BX, SI - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - LONG $0xc664e9c5 // vpcmpgtb %xmm6, %xmm2, %xmm0 - LONG $0xf364c9c5 // vpcmpgtb %xmm3, %xmm6, %xmm6 - LONG $0xc6dbf9c5 // vpand %xmm6, %xmm0, %xmm0 - SHLQ $48, CX - ORQ CX, SI - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - LONG $0xc564e9c5 // vpcmpgtb %xmm5, %xmm2, %xmm0 - LONG $0xeb64d1c5 // vpcmpgtb %xmm3, %xmm5, %xmm5 - LONG $0xc5dbf9c5 // vpand %xmm5, %xmm0, %xmm0 + ORQ BX, DX + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + LONG $0xc764e9c5 // vpcmpgtb %xmm7, %xmm2, %xmm0 + LONG $0xfb64c1c5 // vpcmpgtb %xmm3, %xmm7, %xmm7 + LONG $0xc0dbc1c5 // vpand %xmm0, %xmm7, %xmm0 SHLQ $16, CX - ORQ CX, BX - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - SHLQ $48, R9 - SHLQ $32, CX - CMPQ R11, $-1 - JNE LBB23_7 - TESTQ SI, SI - JNE LBB23_6 - -LBB23_7: - LONG $0xc464e9c5 // vpcmpgtb %xmm4, %xmm2, %xmm0 - LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 - LONG $0xc4dbf9c5 // vpand %xmm4, %xmm0, %xmm0 - LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d - SHLQ $48, R10 - ORQ CX, BX - ORQ R9, AX - MOVQ SI, CX - ORQ R12, CX - JNE LBB23_8 - ORQ R10, BX - TESTQ AX, AX + ORQ CX, R10 + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + LONG $0xc664e9c5 // vpcmpgtb %xmm6, %xmm2, %xmm0 + LONG $0xf364c9c5 // vpcmpgtb %xmm3, %xmm6, %xmm6 + LONG $0xc0dbc9c5 // vpand %xmm0, %xmm6, %xmm0 + SHLQ $32, AX + ORQ AX, R10 + LONG $0xf8d7f9c5 // vpmovmskb %xmm0, %edi + LONG $0xc564e9c5 // vpcmpgtb %xmm5, %xmm2, %xmm0 + LONG $0xeb64d1c5 // vpcmpgtb %xmm3, %xmm5, %xmm5 + LONG $0xc0dbd1c5 // vpand %xmm0, %xmm5, %xmm0 + SHLQ $48, BX + ORQ BX, R10 + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + LONG $0xc464e9c5 // vpcmpgtb %xmm4, %xmm2, %xmm0 + LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 + LONG $0xc0dbd9c5 // vpand %xmm0, %xmm4, %xmm0 + SHLQ $16, DI + ORQ DI, CX + LONG $0xc0d779c5 // vpmovmskb %xmm0, %r8d + SHLQ $48, R11 + SHLQ $32, AX + CMPQ R13, $-1 + JNE LBB23_5 + TESTQ R10, R10 JNE LBB23_10 -LBB23_14: - TESTQ BX, BX - JNE LBB23_15 - ADDQ $-64, R8 - ADDQ $64, DX - CMPQ R8, $63 - JA LBB23_4 - JMP LBB23_18 - -LBB23_8: - MOVQ R12, CX - NOTQ CX - ANDQ SI, CX - MOVQ CX, -56(BP) - LEAQ 0(CX)(CX*1), R9 - ORQ R12, R9 - MOVQ R9, -48(BP) - NOTQ R9 - ANDQ SI, R9 - MOVQ $-6148914691236517206, SI - ANDQ SI, R9 - XORL R12, R12 - ADDQ -56(BP), R9 - SETCS R12 - ADDQ R9, R9 - MOVQ $6148914691236517205, CX - XORQ CX, R9 - ANDQ -48(BP), R9 - NOTQ R9 - ANDQ R9, AX - ORQ R10, BX - TESTQ AX, AX - JE LBB23_14 - JMP LBB23_10 +LBB23_5: + SHLQ $48, R8 + ORQ AX, CX + ORQ R11, DX + MOVQ R10, AX + ORQ R14, AX + JNE LBB23_9 + ORQ R8, CX + TESTQ DX, DX + JNE LBB23_11 -LBB23_6: - BSFQ SI, R11 - ADDQ DX, R11 - JMP LBB23_7 +LBB23_7: + TESTQ CX, CX + JNE LBB23_18 + ADDQ $64, SI + ADDQ $-64, R12 + CMPQ R12, $63 + JA LBB23_3 + JMP LBB23_20 + 
+LBB23_9: + MOVQ R14, AX + NOTQ AX + ANDQ R10, AX + LEAQ 0(AX)(AX*1), R11 + ORQ R14, R11 + MOVQ R11, DI + NOTQ DI + ANDQ R10, DI + MOVQ $-6148914691236517206, BX + ANDQ BX, DI + XORL R14, R14 + ADDQ AX, DI + SETCS R14 + ADDQ DI, DI + MOVQ $6148914691236517205, AX + XORQ AX, DI + ANDQ R11, DI + NOTQ DI + ANDQ DI, DX + ORQ R8, CX + TESTQ DX, DX + JE LBB23_7 + JMP LBB23_11 LBB23_10: - BSFQ AX, CX - LEAQ 0(CX)(DX*1), R12 - ADDQ $1, R12 - TESTQ BX, BX - JE LBB23_50 - BSFQ BX, AX - CMPQ AX, CX - JBE LBB23_13 + MOVQ SI, DI + SUBQ -56(BP), DI + BSFQ R10, R13 + ADDQ DI, R13 + JMP LBB23_5 -LBB23_50: - TESTQ R12, R12 - JS LBB23_51 - LEAQ -1(R15), BX +LBB23_11: + SUBQ -56(BP), SI + BSFQ DX, DX + LEAQ 1(SI)(DX*1), BX + TESTQ CX, CX + MOVQ -48(BP), R14 + JE LBB23_13 + BSFQ CX, AX + CMPQ AX, DX + JBE LBB23_27 + +LBB23_13: + TESTQ BX, BX + JS LBB23_15 MOVQ R15, SI NOTQ SI - ADDQ R12, SI - LONG $0x0002b3e8; BYTE $0x00 // callq _utf8_validate - ADDQ AX, R15 + ADDQ BX, SI + MOVQ -72(BP), DI + LONG $0x0002bee8; BYTE $0x00 // callq _utf8_validate + LEAQ 0(AX)(R15*1), R13 TESTQ AX, AX - LONG $0xfc480f4d // cmovsq %r12, %r15 - MOVQ $-2, R12 - LONG $0xe3480f4c // cmovsq %rbx, %r12 - MOVQ R15, R11 - JMP LBB23_54 + LONG $0xeb480f4c // cmovsq %rbx, %r13 + LEAQ -1(R15), BX + MOVQ $-2, AX + LONG $0xd8490f48 // cmovnsq %rax, %rbx + JMP LBB23_17 -LBB23_51: - CMPQ R12, $-1 - JNE LBB23_54 +LBB23_15: + CMPQ BX, $-1 + JNE LBB23_17 -LBB23_52: - MOVQ $-1, R12 - MOVQ -64(BP), R11 +LBB23_16: + MOVQ $-1, BX + MOVQ -64(BP), R13 -LBB23_54: - MOVQ R11, 0(R14) - MOVQ R12, AX - ADDQ $24, SP +LBB23_17: + MOVQ R13, 0(R14) + MOVQ BX, AX + ADDQ $40, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -6236,212 +6211,217 @@ LBB23_54: BYTE $0x5d // popq %rbp RET -LBB23_15: - MOVQ $-2, R12 - CMPQ R11, $-1 - JNE LBB23_54 - BSFQ BX, R11 - ADDQ DX, R11 - JMP LBB23_54 - LBB23_18: - ADDQ R13, DX - CMPQ R8, $32 - JB LBB23_32 + MOVQ $-2, BX + CMPQ R13, $-1 + JE LBB23_28 + +LBB23_19: + MOVQ -48(BP), R14 + JMP LBB23_17 LBB23_20: - LONG $0x026ffac5 // vmovdqu (%rdx), %xmm0 - LONG $0x4a6ffac5; BYTE $0x10 // vmovdqu $16(%rdx), %xmm1 - QUAD $0xfffffd44156ffac5 // vmovdqu $-700(%rip), %xmm2 /* LCPI23_0(%rip) */ + MOVQ R9, R12 + CMPQ R12, $32 + JB LBB23_35 + +LBB23_21: + LONG $0x066ffac5 // vmovdqu (%rsi), %xmm0 + LONG $0x4e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm1 + QUAD $0xfffffd35156ffac5 // vmovdqu $-715(%rip), %xmm2 /* LCPI23_0(%rip) */ LONG $0xda74f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm3 - LONG $0xf3d7f9c5 // vpmovmskb %xmm3, %esi + LONG $0xdbd779c5 // vpmovmskb %xmm3, %r11d LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 - LONG $0xc2d7f9c5 // vpmovmskb %xmm2, %eax - QUAD $0xfffffd3c156ffac5 // vmovdqu $-708(%rip), %xmm2 /* LCPI23_1(%rip) */ + LONG $0xd2d7f9c5 // vpmovmskb %xmm2, %edx + QUAD $0xfffffd2d156ffac5 // vmovdqu $-723(%rip), %xmm2 /* LCPI23_1(%rip) */ LONG $0xda74f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm3 - LONG $0xdbd7f9c5 // vpmovmskb %xmm3, %ebx + LONG $0xcbd7f9c5 // vpmovmskb %xmm3, %ecx LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 - LONG $0xcad7f9c5 // vpmovmskb %xmm2, %ecx - QUAD $0xfffffd34156ffac5 // vmovdqu $-716(%rip), %xmm2 /* LCPI23_2(%rip) */ + LONG $0xc2d7f9c5 // vpmovmskb %xmm2, %eax + QUAD $0xfffffd25156ffac5 // vmovdqu $-731(%rip), %xmm2 /* LCPI23_2(%rip) */ LONG $0xd864e9c5 // vpcmpgtb %xmm0, %xmm2, %xmm3 LONG $0xe476d9c5 // vpcmpeqd %xmm4, %xmm4, %xmm4 LONG $0xc464f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm0 - LONG $0xc0dbe1c5 // vpand %xmm0, %xmm3, %xmm0 - LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d 
+ LONG $0xc3dbf9c5 // vpand %xmm3, %xmm0, %xmm0 + LONG $0xc0d779c5 // vpmovmskb %xmm0, %r8d LONG $0xc164e9c5 // vpcmpgtb %xmm1, %xmm2, %xmm0 LONG $0xcc64f1c5 // vpcmpgtb %xmm4, %xmm1, %xmm1 - LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 - LONG $0xc8d779c5 // vpmovmskb %xmm0, %r9d + LONG $0xc0dbf1c5 // vpand %xmm0, %xmm1, %xmm0 + LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d + SHLQ $16, DX SHLQ $16, AX - SHLQ $16, CX - ORQ CX, BX - CMPQ R11, $-1 + ORQ AX, CX + CMPQ R13, $-1 JNE LBB23_23 - TESTQ BX, BX - JNE LBB23_22 + TESTQ CX, CX + JNE LBB23_47 LBB23_23: - SHLQ $16, R9 - ORQ SI, AX - MOVQ BX, CX - ORQ R12, CX - JNE LBB23_24 - ORQ R10, R9 - TESTQ AX, AX - JE LBB23_28 + SHLQ $16, R10 + ORQ R11, DX + MOVQ CX, AX + ORQ R14, AX + JNE LBB23_32 + ORQ R8, R10 + TESTQ DX, DX + JE LBB23_33 -LBB23_26: - SUBQ R13, DX - BSFQ AX, CX - LEAQ 0(DX)(CX*1), R12 - ADDQ $1, R12 - TESTQ R9, R9 - JE LBB23_50 - BSFQ R9, AX +LBB23_25: + SUBQ -56(BP), SI + BSFQ DX, CX + LEAQ 1(SI)(CX*1), BX + TESTQ R10, R10 + JE LBB23_30 + BSFQ R10, AX CMPQ AX, CX - JA LBB23_50 + MOVQ -48(BP), R14 + JA LBB23_13 -LBB23_13: - ADDQ DX, AX - CMPQ R11, $-1 - LONG $0xd8440f4c // cmoveq %rax, %r11 - MOVQ $-2, R12 - JMP LBB23_54 +LBB23_27: + ADDQ SI, AX + CMPQ R13, $-1 + LONG $0xe8440f4c // cmoveq %rax, %r13 + MOVQ $-2, BX + JMP LBB23_17 -LBB23_2: - MOVQ DI, DX - CMPQ R8, $32 - JAE LBB23_20 - JMP LBB23_32 +LBB23_28: + SUBQ -56(BP), SI + BSFQ CX, R13 -LBB23_24: - MOVL R12, CX - NOTL CX - ANDL BX, CX - LEAL 0(CX)(CX*1), SI - ORL R12, SI - MOVL SI, -48(BP) - NOTL SI - ANDL BX, SI - ANDL $-1431655766, SI - XORL R12, R12 - ADDL CX, SI - SETCS R12 - ADDL SI, SI - XORL $1431655765, SI - ANDL -48(BP), SI - NOTL SI - ANDL SI, AX - ORQ R10, R9 - TESTQ AX, AX - JNE LBB23_26 +LBB23_29: + ADDQ SI, R13 + MOVQ -48(BP), R14 + JMP LBB23_17 -LBB23_28: - TESTQ R9, R9 - JNE LBB23_29 - ADDQ $32, DX - ADDQ $-32, R8 +LBB23_30: + MOVQ -48(BP), R14 + JMP LBB23_13 + +LBB23_31: + MOVQ $-1, R13 + XORL R14, R14 + CMPQ R12, $32 + JAE LBB23_21 + JMP LBB23_35 LBB23_32: - TESTQ R12, R12 - JNE LBB23_33 - TESTQ R8, R8 - JE LBB23_49 + MOVL R14, AX + NOTL AX + ANDL CX, AX + LEAL 0(AX)(AX*1), BX + ORL R14, BX + MOVL BX, DI + NOTL DI + ANDL CX, DI + ANDL $-1431655766, DI + XORL R14, R14 + ADDL AX, DI + SETCS R14 + ADDL DI, DI + XORL $1431655765, DI + ANDL BX, DI + NOTL DI + ANDL DI, DX + ORQ R8, R10 + TESTQ DX, DX + JNE LBB23_25 -LBB23_36: - MOVQ R13, AX - NOTQ AX - MOVQ R13, SI - NEGQ SI +LBB23_33: + TESTQ R10, R10 + JNE LBB23_48 + ADDQ $32, SI + ADDQ $-32, R12 + +LBB23_35: + TESTQ R14, R14 + JNE LBB23_50 + MOVQ -48(BP), R14 + TESTQ R12, R12 + JE LBB23_46 LBB23_37: - XORL BX, BX + MOVQ -56(BP), CX + NOTQ CX LBB23_38: - MOVBLZX 0(DX)(BX*1), CX - CMPB CX, $34 - JE LBB23_48 - CMPB CX, $92 - JE LBB23_40 - CMPB CX, $31 - JBE LBB23_43 - ADDQ $1, BX - CMPQ R8, BX + LEAQ 1(SI), AX + MOVBLZX 0(SI), DX + CMPB DX, $34 + JE LBB23_45 + LEAQ -1(R12), BX + CMPB DX, $92 + JE LBB23_42 + CMPB DX, $31 + JBE LBB23_52 + MOVQ AX, SI + MOVQ BX, R12 + TESTQ BX, BX JNE LBB23_38 - JMP LBB23_46 - -LBB23_40: - LEAQ -1(R8), CX - CMPQ CX, BX - JE LBB23_52 - LEAQ 0(SI)(DX*1), CX - ADDQ BX, CX - CMPQ R11, $-1 - LONG $0xd9440f4c // cmoveq %rcx, %r11 - ADDQ BX, DX - ADDQ $2, DX - MOVQ R8, CX - SUBQ BX, CX - ADDQ $-2, CX - ADDQ $-2, R8 - CMPQ R8, BX - MOVQ CX, R8 - JNE LBB23_37 - JMP LBB23_52 + JMP LBB23_44 -LBB23_48: - ADDQ BX, DX - ADDQ $1, DX +LBB23_42: + TESTQ BX, BX + JE LBB23_16 + ADDQ CX, AX + CMPQ R13, $-1 + LONG $0xe8440f4c // cmoveq %rax, %r13 + ADDQ $2, SI + ADDQ $-2, R12 + MOVQ R12, BX + TESTQ BX, BX 
+ JNE LBB23_38 -LBB23_49: - SUBQ R13, DX - MOVQ DX, R12 - JMP LBB23_50 +LBB23_44: + CMPB DX, $34 + JNE LBB23_16 + JMP LBB23_46 + +LBB23_45: + MOVQ AX, SI LBB23_46: - CMPB CX, $34 - JNE LBB23_52 - ADDQ R8, DX - JMP LBB23_49 + SUBQ -56(BP), SI + MOVQ SI, BX + JMP LBB23_13 -LBB23_22: - MOVQ DX, CX - SUBQ R13, CX - BSFQ BX, R11 - ADDQ CX, R11 +LBB23_47: + MOVQ SI, AX + SUBQ -56(BP), AX + BSFQ CX, R13 + ADDQ AX, R13 JMP LBB23_23 -LBB23_29: - MOVQ $-2, R12 - CMPQ R11, $-1 - JNE LBB23_54 - SUBQ R13, DX - BSFQ R9, R11 - ADDQ DX, R11 - JMP LBB23_54 +LBB23_48: + MOVQ $-2, BX + CMPQ R13, $-1 + JNE LBB23_19 + SUBQ -56(BP), SI + BSFQ R10, R13 + JMP LBB23_29 -LBB23_33: - TESTQ R8, R8 - JE LBB23_52 - MOVQ R13, AX +LBB23_50: + TESTQ R12, R12 + MOVQ -48(BP), R14 + JE LBB23_16 + MOVQ -56(BP), AX NOTQ AX - ADDQ DX, AX - CMPQ R11, $-1 - LONG $0xd8440f4c // cmoveq %rax, %r11 - ADDQ $1, DX - ADDQ $-1, R8 - TESTQ R8, R8 - JNE LBB23_36 - JMP LBB23_49 + ADDQ SI, AX + CMPQ R13, $-1 + LONG $0xe8440f4c // cmoveq %rax, %r13 + INCQ SI + DECQ R12 + TESTQ R12, R12 + JNE LBB23_37 + JMP LBB23_46 -LBB23_43: - MOVQ $-2, R12 - CMPQ R11, $-1 - JNE LBB23_54 - ADDQ DX, AX - LEAQ 0(BX)(AX*1), R11 - ADDQ $1, R11 - JMP LBB23_54 +LBB23_52: + MOVQ $-2, BX + CMPQ R13, $-1 + JNE LBB23_19 + ADDQ CX, AX + MOVQ AX, R13 + MOVQ -48(BP), R14 + JMP LBB23_17 _utf8_validate: BYTE $0x55 // pushq %rbp @@ -6451,143 +6431,137 @@ _utf8_validate: BYTE $0x53 // pushq %rbx MOVQ $-1, AX TESTQ SI, SI - JLE LBB24_28 - LONG $0xdd0d8d4c; WORD $0x00ad; BYTE $0x00 // leaq $44509(%rip), %r9 /* _first(%rip) */ - LONG $0xd6058d4c; WORD $0x00ae; BYTE $0x00 // leaq $44758(%rip), %r8 /* _ranges(%rip) */ - LONG $0x30158d4c; WORD $0x0001; BYTE $0x00 // leaq $304(%rip), %r10 /* LJTI24_0(%rip) */ + JLE LBB24_27 + LONG $0x050d8d4c; WORD $0x00ae; BYTE $0x00 // leaq $44549(%rip), %r9 /* _first(%rip) */ + LONG $0xfe058d4c; WORD $0x00ae; BYTE $0x00 // leaq $44798(%rip), %r8 /* _ranges(%rip) */ + LONG $0x19158d4c; WORD $0x0001; BYTE $0x00 // leaq $281(%rip), %r10 /* LJTI24_0(%rip) */ MOVQ DI, R14 LBB24_2: CMPB 0(R14), $0 JS LBB24_3 - MOVQ SI, BX + MOVQ SI, DX MOVQ R14, CX CMPQ SI, $16 - JL LBB24_15 - XORL DX, DX - XORL BX, BX + JL LBB24_5 -LBB24_6: - LONG $0x6f7ac1c4; WORD $0x1604 // vmovdqu (%r14,%rdx), %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - TESTW CX, CX - JNE LBB24_7 - ADDQ $16, DX - LEAQ 0(SI)(BX*1), CX - ADDQ $-16, CX - ADDQ $-16, BX +LBB24_10: + LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + TESTW BX, BX + JNE LBB24_11 ADDQ $16, CX - CMPQ CX, $31 - JG LBB24_6 - MOVQ R14, CX - SUBQ BX, CX - MOVQ SI, BX - SUBQ DX, BX + CMPQ DX, $31 + LEAQ -16(DX), DX + JG LBB24_10 -LBB24_15: - TESTQ BX, BX - JLE LBB24_28 - ADDQ $1, BX - MOVQ CX, DX - SUBQ R14, DX +LBB24_5: + TESTQ DX, DX + JLE LBB24_27 + INCQ DX -LBB24_17: +LBB24_7: CMPB 0(CX), $0 - JS LBB24_8 - ADDQ $1, CX - ADDQ $-1, BX - ADDQ $1, DX - CMPQ BX, $1 - JG LBB24_17 - JMP LBB24_28 + JS LBB24_12 + INCQ CX + DECQ DX + CMPQ DX, $1 + JG LBB24_7 + JMP LBB24_27 LBB24_3: XORL DX, DX + CMPQ DX, $-1 + JNE LBB24_14 + JMP LBB24_27 -LBB24_8: +LBB24_12: + SUBQ R14, CX + MOVQ CX, DX CMPQ DX, $-1 - JE LBB24_28 + JE LBB24_27 -LBB24_9: +LBB24_14: SUBQ DX, SI - JLE LBB24_28 + JLE LBB24_27 LEAQ 0(R14)(DX*1), R11 MOVBLZX 0(R14)(DX*1), R14 MOVBLZX 0(R14)(R9*1), R15 - MOVL R15, CX - ANDL $7, CX - CMPQ SI, CX - JB LBB24_26 - CMPB CX, $4 - JA LBB24_26 + MOVL R15, DX + ANDL $7, DX + CMPQ SI, DX + JB LBB24_25 + CMPB DX, $4 + JA LBB24_25 MOVL $1, BX - MOVBLZX CX, DX - MOVLQSX 0(R10)(DX*4), DX - 
ADDQ R10, DX - JMP DX + MOVBLZX DX, CX + MOVLQSX 0(R10)(CX*4), CX + ADDQ R10, CX + JMP CX -LBB24_19: +LBB24_18: MOVB 3(R11), BX TESTB BX, BX - JNS LBB24_26 + JNS LBB24_25 CMPB BX, $-65 - JA LBB24_26 + JA LBB24_25 -LBB24_21: +LBB24_20: MOVB 2(R11), BX TESTB BX, BX - JNS LBB24_26 + JNS LBB24_25 CMPB BX, $-65 - JA LBB24_26 + JA LBB24_25 -LBB24_23: +LBB24_22: TESTB R14, R14 - JNS LBB24_26 + JNS LBB24_25 SHRQ $4, R15 MOVB 1(R11), R14 CMPB R14, 0(R8)(R15*2) - JB LBB24_26 - MOVQ CX, BX + JB LBB24_25 + MOVQ DX, BX CMPB 1(R8)(R15*2), R14 - JB LBB24_26 + JB LBB24_25 -LBB24_27: +LBB24_26: ADDQ BX, R11 MOVQ R11, R14 SUBQ BX, SI JG LBB24_2 - JMP LBB24_28 + JMP LBB24_27 -LBB24_7: - MOVWLZX CX, CX - BSFQ CX, DX - SUBQ BX, DX +LBB24_11: + MOVWLZX BX, DX + SUBQ R14, CX + BSFQ DX, DX + ADDQ CX, DX CMPQ DX, $-1 - JNE LBB24_9 - JMP LBB24_28 + JNE LBB24_14 + JMP LBB24_27 -LBB24_26: +LBB24_25: SUBQ DI, R11 MOVQ R11, AX -LBB24_28: +LBB24_27: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET -// .set L24_0_set_27, LBB24_27-LJTI24_0 // .set L24_0_set_26, LBB24_26-LJTI24_0 -// .set L24_0_set_23, LBB24_23-LJTI24_0 -// .set L24_0_set_21, LBB24_21-LJTI24_0 -// .set L24_0_set_19, LBB24_19-LJTI24_0 +// .set L24_0_set_25, LBB24_25-LJTI24_0 +// .set L24_0_set_22, LBB24_22-LJTI24_0 +// .set L24_0_set_20, LBB24_20-LJTI24_0 +// .set L24_0_set_18, LBB24_18-LJTI24_0 LJTI24_0: - LONG $0xffffffcc // .long L24_0_set_27 - LONG $0xfffffff3 // .long L24_0_set_26 - LONG $0xffffffaf // .long L24_0_set_23 - LONG $0xffffffa2 // .long L24_0_set_21 - LONG $0xffffff95 // .long L24_0_set_19 + LONG $0xffffffc9 // .long L24_0_set_26 + LONG $0xfffffff3 // .long L24_0_set_25 + LONG $0xffffffac // .long L24_0_set_22 + LONG $0xffffff9f // .long L24_0_set_20 + LONG $0xffffff92 // .long L24_0_set_18 _skip_negative: BYTE $0x55 // pushq %rbp @@ -6601,12 +6575,12 @@ _skip_negative: MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0x000099e8; BYTE $0x00 // callq _do_skip_number + LONG $0x000098e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX JS LBB25_1 ADDQ BX, AX MOVQ AX, 0(R14) - ADDQ $-1, BX + DECQ BX JMP LBB25_3 LBB25_1: @@ -6635,316 +6609,355 @@ LCPI26_3: QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' LCPI26_4: - QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' LCPI26_5: QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' 
LCPI26_6: - QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' + QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' _do_skip_number: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5741 // pushq %r15 WORD $0x5641 // pushq %r14 + WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx TESTQ SI, SI - JE LBB26_1 + JE LBB26_34 CMPB 0(DI), $48 - JNE LBB26_6 - MOVL $1, AX + JNE LBB26_5 + MOVL $1, DX CMPQ SI, $1 - JE LBB26_55 - MOVB 1(DI), CX - ADDB $-46, CX - CMPB CX, $55 - JA LBB26_55 - MOVBLZX CX, CX - MOVQ $36028797027352577, DX - BTQ CX, DX - JAE LBB26_55 + JE LBB26_52 + MOVB 1(DI), AX + ADDB $-46, AX + CMPB AX, $55 + JA LBB26_52 + MOVBLZX AX, AX + MOVQ $36028797027352577, CX + BTQ AX, CX + JAE LBB26_52 -LBB26_6: +LBB26_5: CMPQ SI, $16 - JB LBB26_7 - MOVQ $-1, R10 - XORL AX, AX - QUAD $0xffffff27056f7ac5 // vmovdqu $-217(%rip), %xmm8 /* LCPI26_0(%rip) */ - QUAD $0xffffff2f0d6f7ac5 // vmovdqu $-209(%rip), %xmm9 /* LCPI26_1(%rip) */ - QUAD $0xffffff37156f7ac5 // vmovdqu $-201(%rip), %xmm10 /* LCPI26_2(%rip) */ - QUAD $0xffffff3f1d6f7ac5 // vmovdqu $-193(%rip), %xmm11 /* LCPI26_3(%rip) */ - QUAD $0xffffff47256ffac5 // vmovdqu $-185(%rip), %xmm4 /* LCPI26_4(%rip) */ - QUAD $0xffffff4f2d6ffac5 // vmovdqu $-177(%rip), %xmm5 /* LCPI26_5(%rip) */ - QUAD $0xffffff57356ffac5 // vmovdqu $-169(%rip), %xmm6 /* LCPI26_6(%rip) */ - MOVL $4294967295, R11 + JB LBB26_57 + LEAQ -16(SI), R11 + MOVQ R11, AX + ANDQ $-16, AX + LEAQ 16(AX)(DI*1), R10 + ANDL $15, R11 MOVQ $-1, R9 + QUAD $0xffffff15056f7ac5 // vmovdqu $-235(%rip), %xmm8 /* LCPI26_0(%rip) */ + QUAD $0xffffff1d0d6f7ac5 // vmovdqu $-227(%rip), %xmm9 /* LCPI26_1(%rip) */ + QUAD $0xffffff25156f7ac5 // vmovdqu $-219(%rip), %xmm10 /* LCPI26_2(%rip) */ + QUAD $0xffffff2d1d6f7ac5 // vmovdqu $-211(%rip), %xmm11 /* LCPI26_3(%rip) */ + QUAD $0xffffff35256ffac5 // vmovdqu $-203(%rip), %xmm4 /* LCPI26_4(%rip) */ + QUAD $0xffffff3d2d6ffac5 // vmovdqu $-195(%rip), %xmm5 /* LCPI26_5(%rip) */ + QUAD $0xffffff45356ffac5 // vmovdqu $-187(%rip), %xmm6 /* LCPI26_6(%rip) */ + MOVL $4294967295, R14 + MOVQ $-1, AX MOVQ $-1, R8 - MOVQ SI, R15 + MOVQ DI, R15 -LBB26_9: - LONG $0x3c6ffac5; BYTE $0x07 // vmovdqu (%rdi,%rax), %xmm7 +LBB26_7: + LONG $0x6f7ac1c4; BYTE $0x3f // vmovdqu (%r15), %xmm7 LONG $0x6441c1c4; BYTE $0xc0 // vpcmpgtb %xmm8, %xmm7, %xmm0 LONG $0xcf64b1c5 // vpcmpgtb %xmm7, %xmm9, %xmm1 LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 LONG $0xcf74a9c5 // vpcmpeqb %xmm7, %xmm10, %xmm1 LONG $0xd774a1c5 // vpcmpeqb %xmm7, %xmm11, %xmm2 LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd4dbc1c5 // vpand %xmm4, %xmm7, %xmm2 + LONG $0xd4ebc1c5 // vpor %xmm4, %xmm7, %xmm2 LONG $0xd674e9c5 // vpcmpeqb %xmm6, %xmm2, %xmm2 LONG $0xfd74c1c5 // vpcmpeqb %xmm5, %xmm7, %xmm7 LONG $0xdfebe9c5 // vpor %xmm7, %xmm2, %xmm3 LONG $0xc0ebf1c5 // vpor %xmm0, %xmm1, %xmm0 LONG $0xc0ebe1c5 // vpor %xmm0, %xmm3, %xmm0 - LONG $0xdfd7f9c5 // vpmovmskb %xmm7, %ebx - LONG $0xe2d779c5 // vpmovmskb %xmm2, %r12d - LONG $0xf1d779c5 // vpmovmskb %xmm1, %r14d + LONG $0xd7d7f9c5 // vpmovmskb %xmm7, %edx + LONG $0xead779c5 // vpmovmskb %xmm2, %r13d + LONG $0xe1d779c5 // vpmovmskb %xmm1, %r12d LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - XORQ R11, CX + XORQ R14, CX BSFQ CX, CX CMPL CX, $16 - JE LBB26_11 - MOVL $-1, DX - SHLL CX, DX - NOTL DX - ANDL DX, BX - ANDL DX, R12 - ANDL R14, DX - MOVL DX, R14 - -LBB26_11: - LEAL -1(BX), DX - ANDL BX, DX - JNE LBB26_12 - LEAL -1(R12), DX - ANDL 
R12, DX - JNE LBB26_12 - LEAL -1(R14), DX - ANDL R14, DX - JNE LBB26_12 - TESTL BX, BX - JE LBB26_19 - BSFL BX, BX + JE LBB26_9 + MOVL $-1, BX + SHLL CX, BX + NOTL BX + ANDL BX, DX + ANDL BX, R13 + ANDL R12, BX + MOVL BX, R12 + +LBB26_9: + LEAL -1(DX), BX + ANDL DX, BX + JNE LBB26_50 + LEAL -1(R13), BX + ANDL R13, BX + JNE LBB26_50 + LEAL -1(R12), BX + ANDL R12, BX + JNE LBB26_50 + TESTL DX, DX + JE LBB26_15 + MOVQ R15, BX + SUBQ DI, BX + BSFL DX, DX + ADDQ BX, DX CMPQ R8, $-1 - JNE LBB26_56 - ADDQ AX, BX - MOVQ BX, R8 + JNE LBB26_51 + MOVQ DX, R8 + +LBB26_15: + TESTL R13, R13 + JE LBB26_18 + MOVQ R15, BX + SUBQ DI, BX + BSFL R13, DX + ADDQ BX, DX + CMPQ AX, $-1 + JNE LBB26_51 + MOVQ DX, AX -LBB26_19: +LBB26_18: TESTL R12, R12 - JE LBB26_22 + JE LBB26_21 + MOVQ R15, BX + SUBQ DI, BX BSFL R12, DX + ADDQ BX, DX CMPQ R9, $-1 - JNE LBB26_57 - ADDQ AX, DX + JNE LBB26_51 MOVQ DX, R9 -LBB26_22: - TESTL R14, R14 - JE LBB26_25 - BSFL R14, DX - CMPQ R10, $-1 - JNE LBB26_57 - ADDQ AX, DX - MOVQ DX, R10 +LBB26_21: + CMPL CX, $16 + JNE LBB26_35 + ADDQ $16, R15 + ADDQ $-16, SI + CMPQ SI, $15 + JA LBB26_7 + TESTQ R11, R11 + JE LBB26_36 + +LBB26_24: + LEAQ 0(R10)(R11*1), CX + LONG $0x5b358d48; WORD $0x0001; BYTE $0x00 // leaq $347(%rip), %rsi /* LJTI26_0(%rip) */ + JMP LBB26_26 LBB26_25: - CMPL CX, $16 - JNE LBB26_58 - ADDQ $-16, R15 - ADDQ $16, AX - CMPQ R15, $15 - JA LBB26_9 - LEAQ 0(DI)(AX*1), DX - MOVQ DX, CX - CMPQ AX, SI - JE LBB26_41 + MOVQ BX, R10 + DECQ R11 + JE LBB26_54 + +LBB26_26: + MOVBLSX 0(R10), DX + ADDL $-43, DX + CMPL DX, $58 + JA LBB26_36 + LEAQ 1(R10), BX + MOVLQSX 0(SI)(DX*4), DX + ADDQ SI, DX + JMP DX LBB26_28: - LEAQ 0(DX)(R15*1), CX - MOVQ DX, R14 - SUBQ DI, R14 - XORL AX, AX - LONG $0x3f1d8d4c; WORD $0x0001; BYTE $0x00 // leaq $319(%rip), %r11 /* LJTI26_0(%rip) */ - JMP LBB26_29 - -LBB26_31: - CMPL BX, $101 - JNE LBB26_40 - -LBB26_32: + MOVQ BX, DX + SUBQ DI, DX CMPQ R9, $-1 - JNE LBB26_59 - LEAQ 0(R14)(AX*1), R9 - -LBB26_39: - ADDQ $1, AX - CMPQ R15, AX - JE LBB26_41 - -LBB26_29: - MOVBLSX 0(DX)(AX*1), BX - LEAL -48(BX), SI - CMPL SI, $10 - JB LBB26_39 - LEAL -43(BX), SI - CMPL SI, $26 - JA LBB26_31 - MOVLQSX 0(R11)(SI*4), SI - ADDQ R11, SI - JMP SI + JNE LBB26_58 + DECQ DX + MOVQ DX, R9 + JMP LBB26_25 -LBB26_37: - CMPQ R10, $-1 - JNE LBB26_59 - LEAQ 0(R14)(AX*1), R10 - JMP LBB26_39 +LBB26_30: + MOVQ BX, DX + SUBQ DI, DX + CMPQ AX, $-1 + JNE LBB26_58 + DECQ DX + MOVQ DX, AX + JMP LBB26_25 -LBB26_35: +LBB26_32: + MOVQ BX, DX + SUBQ DI, DX CMPQ R8, $-1 - JNE LBB26_59 - LEAQ 0(R14)(AX*1), R8 - JMP LBB26_39 + JNE LBB26_58 + DECQ DX + MOVQ DX, R8 + JMP LBB26_25 -LBB26_1: +LBB26_34: MOVQ $-1, AX - JMP LBB26_55 + JMP LBB26_53 -LBB26_58: - ADDQ AX, CX - ADDQ DI, CX +LBB26_35: + ADDQ CX, R15 + MOVQ R15, R10 -LBB26_41: - MOVQ $-1, AX - TESTQ R9, R9 - JNE LBB26_42 - JMP LBB26_55 +LBB26_36: + MOVQ $-1, DX + TESTQ AX, AX + JE LBB26_52 -LBB26_40: - ADDQ AX, DX - MOVQ DX, CX - MOVQ $-1, AX +LBB26_37: TESTQ R9, R9 - JE LBB26_55 - -LBB26_42: - TESTQ R10, R10 - JE LBB26_55 + JE LBB26_52 TESTQ R8, R8 - JE LBB26_55 - SUBQ DI, CX - LEAQ -1(CX), AX - CMPQ R9, AX - JE LBB26_47 + JE LBB26_52 + SUBQ DI, R10 + LEAQ -1(R10), CX + CMPQ AX, CX + JE LBB26_45 + CMPQ R8, CX + JE LBB26_45 + CMPQ R9, CX + JE LBB26_45 + TESTQ R9, R9 + JLE LBB26_46 + LEAQ -1(R9), CX + CMPQ AX, CX + JE LBB26_46 + NOTQ R9 + MOVQ R9, DX + MOVQ R9, AX + JMP LBB26_53 + +LBB26_45: + NEGQ R10 + MOVQ R10, DX + MOVQ R10, AX + JMP LBB26_53 + +LBB26_46: + MOVQ R8, CX + ORQ AX, CX CMPQ R8, AX - JE LBB26_47 - CMPQ R10, AX - JE LBB26_47 - TESTQ 
R10, R10 - JLE LBB26_51 - LEAQ -1(R10), AX - CMPQ R9, AX - JE LBB26_51 - NOTQ R10 - MOVQ R10, AX - JMP LBB26_55 + JL LBB26_49 + TESTQ CX, CX + JS LBB26_49 + NOTQ R8 + MOVQ R8, DX + MOVQ R8, AX + JMP LBB26_53 -LBB26_47: - NEGQ CX - MOVQ CX, AX +LBB26_49: + TESTQ CX, CX + LEAQ -1(AX), CX + NOTQ AX + LONG $0xc2480f49 // cmovsq %r10, %rax + CMPQ R8, CX + LONG $0xc2450f49 // cmovneq %r10, %rax + JMP LBB26_53 + +LBB26_50: + SUBQ DI, R15 + BSFL BX, DX + ADDQ R15, DX -LBB26_55: +LBB26_51: + NOTQ DX + +LBB26_52: + MOVQ DX, AX + +LBB26_53: BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET -LBB26_51: - MOVQ R8, AX - ORQ R9, AX - CMPQ R8, R9 - JL LBB26_54 - TESTQ AX, AX - JS LBB26_54 - NOTQ R8 - MOVQ R8, AX - JMP LBB26_55 - LBB26_54: - LEAQ -1(R9), DX + MOVQ CX, R10 + MOVQ $-1, DX TESTQ AX, AX - NOTQ R9 - LONG $0xc9480f4c // cmovsq %rcx, %r9 - CMPQ R8, DX - LONG $0xc9450f4c // cmovneq %rcx, %r9 - MOVQ R9, AX - JMP LBB26_55 + JNE LBB26_37 + JMP LBB26_52 -LBB26_12: - BSFL DX, CX - JMP LBB26_13 - -LBB26_59: - SUBQ DX, DI - NOTQ AX - ADDQ DI, AX - JMP LBB26_55 +LBB26_58: + NEGQ DX + JMP LBB26_52 LBB26_57: - MOVL DX, CX - JMP LBB26_13 - -LBB26_56: - MOVL BX, CX - -LBB26_13: - NOTQ AX - SUBQ CX, AX - JMP LBB26_55 - -LBB26_7: MOVQ $-1, R8 - MOVQ DI, DX - MOVQ SI, R15 + MOVQ DI, R10 + MOVQ SI, R11 + MOVQ $-1, AX MOVQ $-1, R9 - MOVQ $-1, R10 - JMP LBB26_28 + JMP LBB26_24 -// .set L26_0_set_37, LBB26_37-LJTI26_0 -// .set L26_0_set_40, LBB26_40-LJTI26_0 -// .set L26_0_set_35, LBB26_35-LJTI26_0 +// .set L26_0_set_28, LBB26_28-LJTI26_0 +// .set L26_0_set_36, LBB26_36-LJTI26_0 // .set L26_0_set_32, LBB26_32-LJTI26_0 +// .set L26_0_set_25, LBB26_25-LJTI26_0 +// .set L26_0_set_30, LBB26_30-LJTI26_0 LJTI26_0: - LONG $0xfffffefc // .long L26_0_set_37 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xfffffefc // .long L26_0_set_37 - LONG $0xffffff0c // .long L26_0_set_35 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xffffff39 // .long L26_0_set_40 - LONG $0xfffffec8 // .long L26_0_set_32 + LONG $0xfffffecc // .long L26_0_set_28 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xfffffecc // .long L26_0_set_28 + LONG $0xfffffefc // .long L26_0_set_32 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG $0xfffffea7 // .long L26_0_set_25 + LONG 
$0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xfffffee4 // .long L26_0_set_30 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xffffff26 // .long L26_0_set_36 + LONG $0xfffffee4 // .long L26_0_set_30 _skip_positive: BYTE $0x55 // pushq %rbp @@ -6961,16 +6974,16 @@ _skip_positive: MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0xfffc6de8; BYTE $0xff // callq _do_skip_number - LEAQ -1(AX), DX + LONG $0xfffbb8e8; BYTE $0xff // callq _do_skip_number MOVQ $-2, CX - MOVQ $-2, SI - SUBQ AX, SI + MOVQ $-2, DX + SUBQ AX, DX TESTQ AX, AX - LONG $0xf2490f48 // cmovnsq %rdx, %rsi + LEAQ -1(AX), AX + LONG $0xc2480f48 // cmovsq %rdx, %rax LONG $0xcb490f48 // cmovnsq %rbx, %rcx - ADDQ R15, SI - MOVQ SI, 0(R14) + ADDQ R15, AX + MOVQ AX, 0(R14) MOVQ CX, AX ADDQ $8, SP BYTE $0x5b // popq %rbx @@ -7006,7 +7019,7 @@ _skip_number: LBB28_3: MOVQ BX, DI - LONG $0xfffbf6e8; BYTE $0xff // callq _do_skip_number + LONG $0xfffb41e8; BYTE $0xff // callq _do_skip_number TESTQ AX, AX JS LBB28_7 ADDQ AX, BX @@ -7049,64 +7062,49 @@ _validate_one: _find_non_ascii: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + MOVQ DI, CX CMPQ SI, $16 JL LBB30_1 - XORL AX, AX - XORL DX, DX -LBB30_9: - LONG $0x046ffac5; BYTE $0x07 // vmovdqu (%rdi,%rax), %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - TESTW CX, CX - JNE LBB30_10 - ADDQ $16, AX - LEAQ 0(SI)(DX*1), CX - ADDQ $-16, CX - ADDQ $-16, DX +LBB30_6: + LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + TESTW AX, AX + JNE LBB30_7 ADDQ $16, CX - CMPQ CX, $31 - JG LBB30_9 - MOVQ DI, CX - SUBQ DX, CX - SUBQ AX, SI - MOVQ $-1, AX - TESTQ SI, SI - JG LBB30_5 - JMP LBB30_12 + CMPQ SI, $31 + LEAQ -16(SI), SI + JG LBB30_6 LBB30_1: - MOVQ DI, CX MOVQ $-1, AX TESTQ SI, SI - JLE LBB30_12 - -LBB30_5: - ADDQ $1, SI - MOVQ CX, DX - SUBQ DI, DX + JLE LBB30_9 + INCQ SI -LBB30_6: +LBB30_3: CMPB 0(CX), $0 - JS LBB30_7 - ADDQ $1, CX - ADDQ $-1, SI - ADDQ $1, DX + JS LBB30_8 
+ INCQ CX + DECQ SI CMPQ SI, $1 - JG LBB30_6 + JG LBB30_3 -LBB30_12: +LBB30_9: BYTE $0x5d // popq %rbp RET -LBB30_7: - MOVQ DX, AX +LBB30_8: + SUBQ DI, CX + MOVQ CX, AX BYTE $0x5d // popq %rbp RET -LBB30_10: - MOVWLZX CX, AX +LBB30_7: + MOVWLZX AX, AX + SUBQ DI, CX BSFQ AX, AX - SUBQ DX, AX + ADDQ CX, AX BYTE $0x5d // popq %rbp RET @@ -7115,7 +7113,7 @@ _print_mantissa: WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx - MOVL DX, R14 + MOVLQSX DX, R14 ADDQ SI, R14 MOVQ DI, AX SHRQ $32, AX @@ -7151,7 +7149,7 @@ _print_mantissa: LONG $0x64fa6b41 // imull $100, %r10d, %edi SUBL DI, AX MOVWLZX AX, R11 - LONG $0x663d8d48; WORD $0x0059; BYTE $0x00 // leaq $22886(%rip), %rdi /* _Digits(%rip) */ + LONG $0x203d8d48; WORD $0x0059; BYTE $0x00 // leaq $22816(%rip), %rdi /* _Digits(%rip) */ MOVWLZX 0(DI)(R8*2), AX MOVW AX, -2(R14) MOVWLZX 0(DI)(R9*2), AX @@ -7167,7 +7165,7 @@ LBB31_2: CMPL DI, $10000 JB LBB31_3 MOVL $3518437209, R8 - LONG $0x1e0d8d4c; WORD $0x0059; BYTE $0x00 // leaq $22814(%rip), %r9 /* _Digits(%rip) */ + LONG $0xd80d8d4c; WORD $0x0058; BYTE $0x00 // leaq $22744(%rip), %r9 /* _Digits(%rip) */ LBB31_5: MOVL DI, AX @@ -7198,7 +7196,7 @@ LBB31_7: WORD $0xd16b; BYTE $0x64 // imull $100, %ecx, %edx SUBL DX, AX MOVWLZX AX, AX - LONG $0xb7158d48; WORD $0x0058; BYTE $0x00 // leaq $22711(%rip), %rdx /* _Digits(%rip) */ + LONG $0x71158d48; WORD $0x0058; BYTE $0x00 // leaq $22641(%rip), %rdx /* _Digits(%rip) */ MOVWLZX 0(DX)(AX*2), AX MOVW AX, -2(R14) ADDQ $-2, R14 @@ -7208,7 +7206,7 @@ LBB31_8: CMPL AX, $10 JB LBB31_10 MOVL AX, AX - LONG $0x9a0d8d48; WORD $0x0058; BYTE $0x00 // leaq $22682(%rip), %rcx /* _Digits(%rip) */ + LONG $0x540d8d48; WORD $0x0058; BYTE $0x00 // leaq $22612(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVW AX, -2(R14) BYTE $0x5b // popq %rbx @@ -7236,51 +7234,44 @@ _left_shift: WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx MOVL SI, CX - IMUL3Q $104, CX, R11 - LONG $0xbe158d48; WORD $0x008b; BYTE $0x00 // leaq $35774(%rip), %rdx /* _LSHIFT_TAB(%rip) */ - MOVL 0(R11)(DX*1), R8 + IMUL3Q $104, CX, DX + LONG $0x78358d48; WORD $0x008b; BYTE $0x00 // leaq $35704(%rip), %rsi /* _LSHIFT_TAB(%rip) */ + MOVL 0(DX)(SI*1), R8 MOVQ 0(DI), R10 MOVLQSX 16(DI), R9 + MOVB 4(DX)(SI*1), AX TESTQ R9, R9 - JE LBB32_1 - LEAQ 0(R11)(DX*1), SI - ADDQ $4, SI - XORL BX, BX + JE LBB32_6 + LEAQ 5(DX)(SI*1), DX + XORL SI, SI -LBB32_4: - MOVBLZX 0(SI)(BX*1), AX +LBB32_3: TESTB AX, AX - JE LBB32_10 - CMPB 0(R10)(BX*1), AX - JNE LBB32_6 - ADDQ $1, BX - CMPQ R9, BX - JNE LBB32_4 - MOVL R9, SI - ADDQ R11, DX - CMPB 4(SI)(DX*1), $0 - JNE LBB32_9 - JMP LBB32_10 - -LBB32_1: - XORL SI, SI - ADDQ R11, DX - CMPB 4(SI)(DX*1), $0 - JE LBB32_10 + JE LBB32_8 + CMPB 0(R10)(SI*1), AX + JNE LBB32_5 + MOVBLZX 0(DX)(SI*1), AX + INCQ SI + CMPQ R9, SI + JNE LBB32_3 + +LBB32_6: + TESTB AX, AX + JE LBB32_8 -LBB32_9: - ADDL $-1, R8 +LBB32_7: + DECL R8 -LBB32_10: +LBB32_8: TESTL R9, R9 - JLE LBB32_25 + JLE LBB32_23 LEAL 0(R8)(R9*1), AX MOVLQSX AX, R14 - ADDQ $-1, R14 + DECQ R14 XORL DX, DX MOVQ $-3689348814741910323, R11 -LBB32_12: +LBB32_10: MOVBQSX -1(R10)(R9*1), SI ADDQ $-48, SI SHLQ CX, SI @@ -7293,91 +7284,83 @@ LBB32_12: MOVQ SI, AX SUBQ BX, AX CMPQ 8(DI), R14 - JBE LBB32_18 + JBE LBB32_16 ADDB $48, AX MOVB AX, 0(R10)(R14*1) - JMP LBB32_20 + JMP LBB32_18 -LBB32_18: +LBB32_16: TESTQ AX, AX - JE LBB32_20 + JE LBB32_18 MOVL $1, 28(DI) -LBB32_20: +LBB32_18: CMPQ R9, $2 - JL LBB32_14 - ADDQ $-1, R9 + JL LBB32_12 + DECQ R9 MOVQ 0(DI), R10 - ADDQ $-1, R14 - JMP 
LBB32_12 + DECQ R14 + JMP LBB32_10 -LBB32_14: +LBB32_12: CMPQ SI, $10 - JAE LBB32_15 + JAE LBB32_13 -LBB32_25: +LBB32_23: MOVLQSX 16(DI), CX MOVLQSX R8, AX ADDQ CX, AX MOVL AX, 16(DI) MOVQ 8(DI), CX CMPQ CX, AX - JA LBB32_27 + JA LBB32_25 MOVL CX, 16(DI) MOVL CX, AX -LBB32_27: +LBB32_25: ADDL R8, 20(DI) TESTL AX, AX - JLE LBB32_31 + JLE LBB32_29 MOVQ 0(DI), CX - MOVL AX, DX - ADDQ $1, DX - ADDL $-1, AX + MOVL AX, AX -LBB32_29: - MOVL AX, SI - CMPB 0(CX)(SI*1), $48 - JNE LBB32_33 - MOVL AX, 16(DI) - ADDQ $-1, DX - ADDL $-1, AX +LBB32_27: + CMPB -1(CX)(AX*1), $48 + JNE LBB32_31 + MOVL AX, DX + DECQ AX + DECL DX + MOVL DX, 16(DI) + LEAQ 1(AX), DX CMPQ DX, $1 - JG LBB32_29 - JMP LBB32_32 + JG LBB32_27 -LBB32_31: - JE LBB32_32 +LBB32_29: + TESTL AX, AX + JE LBB32_30 -LBB32_33: +LBB32_31: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB32_32: - MOVL $0, 20(DI) - BYTE $0x5b // popq %rbx - WORD $0x5e41 // popq %r14 - BYTE $0x5d // popq %rbp - RET - -LBB32_15: +LBB32_13: MOVLQSX R14, SI - ADDQ $-1, SI - JMP LBB32_16 + DECQ SI + JMP LBB32_14 -LBB32_17: +LBB32_15: ADDB $48, AX MOVQ 0(DI), BX MOVB AX, 0(BX)(SI*1) -LBB32_24: - ADDQ $-1, SI +LBB32_22: + DECQ SI CMPQ CX, $9 - JBE LBB32_25 + JBE LBB32_23 -LBB32_16: +LBB32_14: MOVQ DX, CX MOVQ DX, AX MULQ R11 @@ -7387,157 +7370,153 @@ LBB32_16: MOVQ CX, AX SUBQ BX, AX CMPQ 8(DI), SI - JA LBB32_17 + JA LBB32_15 TESTQ AX, AX - JE LBB32_24 + JE LBB32_22 MOVL $1, 28(DI) - JMP LBB32_24 + JMP LBB32_22 -LBB32_6: - JL LBB32_9 - JMP LBB32_10 +LBB32_30: + MOVL $0, 20(DI) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 + BYTE $0x5d // popq %rbp + RET + +LBB32_5: + JL LBB32_7 + JMP LBB32_8 _right_shift: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - BYTE $0x53 // pushq %rbx - MOVL SI, CX - MOVL 16(DI), R8 - XORL DX, DX - TESTL R8, R8 - MOVL $0, R11 - LONG $0xd8490f45 // cmovnsl %r8d, %r11d - XORL AX, AX + BYTE $0x55 // pushq %rbp + WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + MOVL SI, CX + MOVLQSX 16(DI), R9 + XORL SI, SI + XORL AX, AX LBB33_1: - CMPQ R11, DX - JE LBB33_2 + CMPQ SI, R9 + JGE LBB33_2 LEAQ 0(AX)(AX*4), AX - MOVQ 0(DI), SI - MOVBQSX 0(SI)(DX*1), SI - LEAQ 0(SI)(AX*2), AX - ADDQ $-48, AX - ADDQ $1, DX - MOVQ AX, SI - SHRQ CX, SI - TESTQ SI, SI + MOVQ 0(DI), DX + MOVBQSX 0(DX)(SI*1), DX + LEAQ -48(DX)(AX*2), AX + INCQ SI + MOVQ AX, DX + SHRQ CX, DX + TESTQ DX, DX JE LBB33_1 - MOVL DX, R11 -LBB33_7: +LBB33_6: MOVL 20(DI), DX - SUBL R11, DX - ADDL $1, DX - MOVQ $-1, R9 - SHLQ CX, R9 + SUBL SI, DX + INCL DX + MOVQ $-1, R8 + SHLQ CX, R8 MOVL DX, 20(DI) - NOTQ R9 + NOTQ R8 XORL R10, R10 - CMPL R11, R8 - JGE LBB33_10 - MOVLQSX R11, R8 + CMPL SI, R9 + JGE LBB33_9 + MOVLQSX SI, R9 MOVQ 0(DI), SI XORL R10, R10 -LBB33_9: +LBB33_8: MOVQ AX, DX SHRQ CX, DX - ANDQ R9, AX + ANDQ R8, AX ADDB $48, DX MOVB DX, 0(SI)(R10*1) - MOVQ 0(DI), SI - LEAQ 0(SI)(R8*1), DX - MOVBQSX 0(R10)(DX*1), R11 - LEAQ 1(R8)(R10*1), BX - ADDQ $1, R10 LEAQ 0(AX)(AX*4), AX - LEAQ 0(R11)(AX*2), AX - ADDQ $-48, AX - MOVLQSX 16(DI), DX - CMPQ BX, DX - JL LBB33_9 - JMP LBB33_10 - -LBB33_12: + MOVQ 0(DI), SI + LEAQ 0(SI)(R9*1), DX + MOVBQSX 0(R10)(DX*1), DX + LEAQ -48(DX)(AX*2), AX + MOVLQSX 16(DI), R11 + LEAQ 1(R9)(R10*1), DX + INCQ R10 + CMPQ DX, R11 + JL LBB33_8 + JMP LBB33_9 + +LBB33_11: ADDB $48, SI - MOVQ 0(DI), BX - MOVB SI, 0(BX)(DX*1) - ADDL $1, DX - MOVL DX, R10 + MOVQ 0(DI), DX + MOVB SI, 0(DX)(R9*1) + INCL R9 + MOVL R9, R10 -LBB33_15: +LBB33_14: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX -LBB33_10: +LBB33_9: TESTQ AX, AX - JE 
LBB33_16 + JE LBB33_15 MOVQ AX, SI SHRQ CX, SI - ANDQ R9, AX - MOVLQSX R10, DX - CMPQ 8(DI), DX - JA LBB33_12 + ANDQ R8, AX + MOVLQSX R10, R9 + CMPQ 8(DI), R9 + JA LBB33_11 TESTQ SI, SI - JE LBB33_15 + JE LBB33_14 MOVL $1, 28(DI) - JMP LBB33_15 + JMP LBB33_14 -LBB33_16: +LBB33_15: MOVL R10, 16(DI) TESTL R10, R10 - JLE LBB33_20 + JLE LBB33_19 MOVQ 0(DI), AX - MOVL R10, CX - ADDQ $1, CX - ADDL $-1, R10 - -LBB33_18: - MOVL R10, DX - CMPB 0(AX)(DX*1), $48 - JNE LBB33_22 - MOVL R10, 16(DI) - ADDQ $-1, CX - ADDL $-1, R10 + MOVL R10, R10 + +LBB33_17: + CMPB -1(AX)(R10*1), $48 + JNE LBB33_21 + MOVL R10, CX + DECQ R10 + DECL CX + MOVL CX, 16(DI) + LEAQ 1(R10), CX CMPQ CX, $1 - JG LBB33_18 - JMP LBB33_21 + JG LBB33_17 + +LBB33_19: + TESTL R10, R10 + JE LBB33_20 + +LBB33_21: + BYTE $0x5d // popq %rbp + RET LBB33_2: TESTQ AX, AX - JE LBB33_23 + JE LBB33_22 MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX - JNE LBB33_7 + JNE LBB33_6 LBB33_4: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX - ADDL $1, R11 + INCL SI MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX JE LBB33_4 - JMP LBB33_7 + JMP LBB33_6 LBB33_20: - JE LBB33_21 - -LBB33_22: - BYTE $0x5b // popq %rbx - BYTE $0x5d // popq %rbp - RET - -LBB33_21: MOVL $0, 20(DI) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB33_23: +LBB33_22: MOVL $0, 16(DI) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -11693,29 +11672,29 @@ _LSHIFT_TAB: LONG $0x00000000 // .asciz 4, '\x00\x00\x00\x00' _P10_TAB: - QUAD $0x3ff0000000000000 // .quad 0x3ff0000000000000 - QUAD $0x4024000000000000 // .quad 0x4024000000000000 - QUAD $0x4059000000000000 // .quad 0x4059000000000000 - QUAD $0x408f400000000000 // .quad 0x408f400000000000 - QUAD $0x40c3880000000000 // .quad 0x40c3880000000000 - QUAD $0x40f86a0000000000 // .quad 0x40f86a0000000000 - QUAD $0x412e848000000000 // .quad 0x412e848000000000 - QUAD $0x416312d000000000 // .quad 0x416312d000000000 - QUAD $0x4197d78400000000 // .quad 0x4197d78400000000 - QUAD $0x41cdcd6500000000 // .quad 0x41cdcd6500000000 - QUAD $0x4202a05f20000000 // .quad 0x4202a05f20000000 - QUAD $0x42374876e8000000 // .quad 0x42374876e8000000 - QUAD $0x426d1a94a2000000 // .quad 0x426d1a94a2000000 - QUAD $0x42a2309ce5400000 // .quad 0x42a2309ce5400000 - QUAD $0x42d6bcc41e900000 // .quad 0x42d6bcc41e900000 - QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 - QUAD $0x4341c37937e08000 // .quad 0x4341c37937e08000 - QUAD $0x4376345785d8a000 // .quad 0x4376345785d8a000 - QUAD $0x43abc16d674ec800 // .quad 0x43abc16d674ec800 - QUAD $0x43e158e460913d00 // .quad 0x43e158e460913d00 - QUAD $0x4415af1d78b58c40 // .quad 0x4415af1d78b58c40 - QUAD $0x444b1ae4d6e2ef50 // .quad 0x444b1ae4d6e2ef50 - QUAD $0x4480f0cf064dd592 // .quad 0x4480f0cf064dd592 + QUAD $0x3ff0000000000000 // .quad 4607182418800017408 + QUAD $0x4024000000000000 // .quad 4621819117588971520 + QUAD $0x4059000000000000 // .quad 4636737291354636288 + QUAD $0x408f400000000000 // .quad 4652007308841189376 + QUAD $0x40c3880000000000 // .quad 4666723172467343360 + QUAD $0x40f86a0000000000 // .quad 4681608360884174848 + QUAD $0x412e848000000000 // .quad 4696837146684686336 + QUAD $0x416312d000000000 // .quad 4711630319722168320 + QUAD $0x4197d78400000000 // .quad 4726483295884279808 + QUAD $0x41cdcd6500000000 // .quad 4741671816366391296 + QUAD $0x4202a05f20000000 // .quad 4756540486875873280 + QUAD $0x42374876e8000000 // .quad 4771362005757984768 + QUAD $0x426d1a94a2000000 // .quad 4786511204640096256 + QUAD $0x42a2309ce5400000 // .quad 4801453603149578240 + QUAD $0x42d6bcc41e900000 // .quad 4816244402031689728 + QUAD 
$0x430c6bf526340000 // .quad 4831355200913801216 + QUAD $0x4341c37937e08000 // .quad 4846369599423283200 + QUAD $0x4376345785d8a000 // .quad 4861130398305394688 + QUAD $0x43abc16d674ec800 // .quad 4876203697187506176 + QUAD $0x43e158e460913d00 // .quad 4891288408196988160 + QUAD $0x4415af1d78b58c40 // .quad 4906019910204099648 + QUAD $0x444b1ae4d6e2ef50 // .quad 4921056587992461136 + QUAD $0x4480f0cf064dd592 // .quad 4936209963552724370 _first: QUAD $0xf0f0f0f0f0f0f0f0; QUAD $0xf0f0f0f0f0f0f0f0 // .ascii 16, '\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0' @@ -11759,7 +11738,7 @@ _entry: _f64toa: MOVQ out+0(FP), DI MOVSD val+8(FP), X0 - CALL ·__native_entry__+570(SB) // _f64toa + CALL ·__native_entry__+630(SB) // _f64toa MOVQ AX, ret+16(FP) RET @@ -11772,7 +11751,7 @@ TEXT ·__html_escape(SB), NOSPLIT | NOFRAME, $0 - 40 _entry: MOVQ (TLS), R14 - LEAQ -64(SP), R12 + LEAQ -72(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11781,7 +11760,7 @@ _html_escape: MOVQ nb+8(FP), SI MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX - CALL ·__native_entry__+8834(SB) // _html_escape + CALL ·__native_entry__+8581(SB) // _html_escape MOVQ AX, ret+32(FP) RET @@ -11801,7 +11780,7 @@ _entry: _i64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+3653(SB) // _i64toa + CALL ·__native_entry__+3642(SB) // _i64toa MOVQ AX, ret+16(FP) RET @@ -11822,7 +11801,7 @@ _lspace: MOVQ sp+0(FP), DI MOVQ nb+8(FP), SI MOVQ off+16(FP), DX - CALL ·__native_entry__+251(SB) // _lspace + CALL ·__native_entry__+301(SB) // _lspace MOVQ AX, ret+24(FP) RET @@ -11855,7 +11834,7 @@ TEXT ·__quote(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -80(SP), R12 + LEAQ -56(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11865,7 +11844,7 @@ _quote: MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+4970(SB) // _quote + CALL ·__native_entry__+4955(SB) // _quote MOVQ AX, ret+40(FP) RET @@ -11878,7 +11857,7 @@ TEXT ·__skip_array(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -152(SP), R12 + LEAQ -160(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11886,7 +11865,7 @@ _skip_array: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+18226(SB) // _skip_array + CALL ·__native_entry__+18017(SB) // _skip_array MOVQ AX, ret+24(FP) RET @@ -11899,14 +11878,14 @@ TEXT ·__skip_number(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -88(SP), R12 + LEAQ -96(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow _skip_number: MOVQ s+0(FP), DI MOVQ p+8(FP), SI - CALL ·__native_entry__+21165(SB) // _skip_number + CALL ·__native_entry__+21135(SB) // _skip_number MOVQ AX, ret+16(FP) RET @@ -11919,7 +11898,7 @@ TEXT ·__skip_object(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -152(SP), R12 + LEAQ -160(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11927,7 +11906,7 @@ _skip_object: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+18263(SB) // _skip_object + CALL ·__native_entry__+18054(SB) // _skip_object MOVQ AX, ret+24(FP) RET @@ -11940,7 +11919,7 @@ TEXT ·__skip_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -152(SP), R12 + LEAQ -160(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11948,7 +11927,7 @@ _skip_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+16378(SB) // _skip_one + CALL ·__native_entry__+16120(SB) // _skip_one MOVQ AX, ret+24(FP) RET @@ -11968,7 +11947,7 @@ _entry: _u64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+3748(SB) 
// _u64toa + CALL ·__native_entry__+3735(SB) // _u64toa MOVQ AX, ret+16(FP) RET @@ -11981,7 +11960,7 @@ TEXT ·__unquote(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -80(SP), R12 + LEAQ -88(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11991,7 +11970,7 @@ _unquote: MOVQ dp+16(FP), DX MOVQ ep+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+6540(SB) // _unquote + CALL ·__native_entry__+6426(SB) // _unquote MOVQ AX, ret+40(FP) RET @@ -12004,7 +11983,7 @@ TEXT ·__validate_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -152(SP), R12 + LEAQ -160(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12012,7 +11991,7 @@ _validate_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+21282(SB) // _validate_one + CALL ·__native_entry__+21252(SB) // _validate_one MOVQ AX, ret+24(FP) RET @@ -12025,7 +12004,7 @@ TEXT ·__value(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -424(SP), R12 + LEAQ -416(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12035,7 +12014,7 @@ _value: MOVQ p+16(FP), DX MOVQ v+24(FP), CX MOVQ allow_control+32(FP), R8 - CALL ·__native_entry__+11437(SB) // _value + CALL ·__native_entry__+11301(SB) // _value MOVQ AX, ret+40(FP) RET @@ -12048,7 +12027,7 @@ TEXT ·__vnumber(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -320(SP), R12 + LEAQ -312(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12056,7 +12035,7 @@ _vnumber: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+14392(SB), AX // _vnumber + LEAQ ·__native_entry__+14278(SB), AX // _vnumber JMP AX _stack_grow: @@ -12076,7 +12055,7 @@ _vsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+15822(SB), AX // _vsigned + LEAQ ·__native_entry__+15592(SB), AX // _vsigned JMP AX _stack_grow: @@ -12088,7 +12067,7 @@ TEXT ·__vstring(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -120(SP), R12 + LEAQ -128(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12096,7 +12075,7 @@ _vstring: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+13345(SB), AX // _vstring + LEAQ ·__native_entry__+13243(SB), AX // _vstring JMP AX _stack_grow: @@ -12108,7 +12087,7 @@ TEXT ·__vunsigned(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -16(SP), R12 + LEAQ -8(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12116,7 +12095,7 @@ _vunsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+16102(SB), AX // _vunsigned + LEAQ ·__native_entry__+15851(SB), AX // _vunsigned JMP AX _stack_grow: diff --git a/internal/native/avx/native_subr_amd64.go b/internal/native/avx/native_subr_amd64.go index a4cbcbee1..57ef79ba8 100644 --- a/internal/native/avx/native_subr_amd64.go +++ b/internal/native/avx/native_subr_amd64.go @@ -9,45 +9,45 @@ package avx func __native_entry__() uintptr var ( - _subr__f64toa = __native_entry__() + 570 - _subr__html_escape = __native_entry__() + 8834 - _subr__i64toa = __native_entry__() + 3653 - _subr__lspace = __native_entry__() + 251 + _subr__f64toa = __native_entry__() + 630 + _subr__html_escape = __native_entry__() + 8581 + _subr__i64toa = __native_entry__() + 3642 + _subr__lspace = __native_entry__() + 301 _subr__lzero = __native_entry__() + 13 - _subr__quote = __native_entry__() + 4970 - _subr__skip_array = __native_entry__() + 18226 - _subr__skip_number = __native_entry__() + 21165 - _subr__skip_object = __native_entry__() + 18263 - _subr__skip_one = __native_entry__() + 16378 - _subr__u64toa = 
__native_entry__() + 3748 - _subr__unquote = __native_entry__() + 6540 - _subr__validate_one = __native_entry__() + 21282 - _subr__value = __native_entry__() + 11437 - _subr__vnumber = __native_entry__() + 14392 - _subr__vsigned = __native_entry__() + 15822 - _subr__vstring = __native_entry__() + 13345 - _subr__vunsigned = __native_entry__() + 16102 + _subr__quote = __native_entry__() + 4955 + _subr__skip_array = __native_entry__() + 18017 + _subr__skip_number = __native_entry__() + 21135 + _subr__skip_object = __native_entry__() + 18054 + _subr__skip_one = __native_entry__() + 16120 + _subr__u64toa = __native_entry__() + 3735 + _subr__unquote = __native_entry__() + 6426 + _subr__validate_one = __native_entry__() + 21252 + _subr__value = __native_entry__() + 11301 + _subr__vnumber = __native_entry__() + 14278 + _subr__vsigned = __native_entry__() + 15592 + _subr__vstring = __native_entry__() + 13243 + _subr__vunsigned = __native_entry__() + 15851 ) const ( _stack__f64toa = 120 - _stack__html_escape = 64 + _stack__html_escape = 72 _stack__i64toa = 24 _stack__lspace = 8 _stack__lzero = 8 - _stack__quote = 80 - _stack__skip_array = 152 - _stack__skip_number = 88 - _stack__skip_object = 152 - _stack__skip_one = 152 + _stack__quote = 56 + _stack__skip_array = 160 + _stack__skip_number = 96 + _stack__skip_object = 160 + _stack__skip_one = 160 _stack__u64toa = 8 - _stack__unquote = 80 - _stack__validate_one = 152 - _stack__value = 424 - _stack__vnumber = 320 + _stack__unquote = 88 + _stack__validate_one = 160 + _stack__value = 416 + _stack__vnumber = 312 _stack__vsigned = 16 - _stack__vstring = 120 - _stack__vunsigned = 16 + _stack__vstring = 128 + _stack__vunsigned = 8 ) var ( diff --git a/internal/native/avx2/native_amd64.s b/internal/native/avx2/native_amd64.s index 923de0393..8ce67ddb5 100644 --- a/internal/native/avx2/native_amd64.s +++ b/internal/native/avx2/native_amd64.s @@ -15,75 +15,89 @@ _lzero: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp CMPQ SI, $32 - JB LBB0_3 - -LBB0_1: + JB LBB0_5 + LEAQ -32(SI), AX + MOVQ AX, CX + ANDQ $-32, CX + LEAQ 32(CX)(DI*1), CX + ANDL $31, AX + +LBB0_2: LONG $0x076ffec5 // vmovdqu (%rdi), %ymm0 LONG $0x177de2c4; BYTE $0xc0 // vptest %ymm0, %ymm0 - JNE LBB0_9 + JNE LBB0_13 ADDQ $32, DI ADDQ $-32, SI CMPQ SI, $31 - JA LBB0_1 + JA LBB0_2 + MOVQ AX, SI + MOVQ CX, DI -LBB0_3: +LBB0_5: WORD $0xf8c5; BYTE $0x77 // vzeroupper CMPQ SI, $16 - JB LBB0_6 - -LBB0_4: + JB LBB0_10 + LEAQ -16(SI), AX + MOVQ AX, CX + ANDQ $-16, CX + LEAQ 16(CX)(DI*1), CX + ANDL $15, AX + +LBB0_7: LONG $0x076ffac5 // vmovdqu (%rdi), %xmm0 LONG $0x1779e2c4; BYTE $0xc0 // vptest %xmm0, %xmm0 - JNE LBB0_10 + JNE LBB0_14 ADDQ $16, DI ADDQ $-16, SI CMPQ SI, $15 - JA LBB0_4 + JA LBB0_7 + MOVQ AX, SI + MOVQ CX, DI -LBB0_6: +LBB0_10: CMPQ SI, $8 - JB LBB0_12 + JB LBB0_16 MOVL $1, AX CMPQ 0(DI), $0 - JNE LBB0_8 + JNE LBB0_12 ADDQ $8, DI ADDQ $-8, SI -LBB0_12: +LBB0_16: CMPQ SI, $4 - JB LBB0_15 + JB LBB0_19 MOVL $1, AX CMPL 0(DI), $0 - JNE LBB0_8 + JNE LBB0_12 ADDQ $4, DI ADDQ $-4, SI -LBB0_15: +LBB0_19: CMPQ SI, $2 - JB LBB0_18 + JB LBB0_22 MOVL $1, AX CMPW 0(DI), $0 - JNE LBB0_8 + JNE LBB0_12 ADDQ $2, DI ADDQ $-2, SI -LBB0_18: +LBB0_22: XORL AX, AX TESTQ SI, SI - JE LBB0_8 + JE LBB0_12 CMPB 0(DI), $0 SETNE AX BYTE $0x5d // popq %rbp RET -LBB0_8: +LBB0_12: BYTE $0x5d // popq %rbp RET -LBB0_9: +LBB0_13: WORD $0xf8c5; BYTE $0x77 // vzeroupper -LBB0_10: +LBB0_14: MOVL $1, AX BYTE $0x5d // popq %rbp RET @@ -119,145 +133,153 @@ LCPI1_7: _lspace: BYTE $0x55 // pushq %rbp WORD 
$0x8948; BYTE $0xe5 // movq %rsp, %rbp - MOVQ DX, AX - LEAQ 0(DI)(DX*1), R10 - MOVQ SI, DX - SUBQ AX, DX + LEAQ 0(DI)(DX*1), AX + SUBQ DX, SI JE LBB1_7 - MOVL R10, CX + MOVL AX, CX ANDL $31, CX TESTQ CX, CX JE LBB1_7 - LEAQ 1(DI), R9 - LEAQ -1(SI), CX + LEAQ -1(SI), R9 + XORL DX, DX MOVQ $4294977024, R8 LBB1_3: - MOVBLSX 0(DI)(AX*1), DX - CMPL DX, $32 - JA LBB1_26 - BTQ DX, R8 - JAE LBB1_26 - LEAQ 1(AX), DX - CMPQ CX, AX + MOVBLSX 0(AX)(DX*1), CX + CMPL CX, $32 + JA LBB1_5 + BTQ CX, R8 + JAE LBB1_5 + LEAQ 1(DX), R10 + CMPQ R9, DX JE LBB1_6 - ADDL R9, AX - ANDL $31, AX - TESTQ AX, AX - MOVQ DX, AX + LEAQ 1(AX)(DX*1), CX + ANDL $31, CX + MOVQ R10, DX + TESTQ CX, CX JNE LBB1_3 LBB1_6: - LEAQ 0(DI)(DX*1), R10 - SUBQ DX, SI - MOVQ SI, DX + ADDQ R10, AX + SUBQ R10, SI LBB1_7: - CMPQ DX, $32 - JB LBB1_12 - MOVQ DI, SI - SUBQ R10, SI - QUAD $0xfffffec4056ffec5 // vmovdqu $-316(%rip), %ymm0 /* LCPI1_0(%rip) */ - QUAD $0xfffffedc0d6ffec5 // vmovdqu $-292(%rip), %ymm1 /* LCPI1_1(%rip) */ - QUAD $0xfffffef4156ffec5 // vmovdqu $-268(%rip), %ymm2 /* LCPI1_2(%rip) */ - QUAD $0xffffff0c1d6ffec5 // vmovdqu $-244(%rip), %ymm3 /* LCPI1_3(%rip) */ + CMPQ SI, $32 + JB LBB1_15 + LEAQ -32(SI), CX + MOVQ CX, DX + ANDQ $-32, DX + LEAQ 32(DX)(AX*1), R8 + ANDL $31, CX + QUAD $0xfffffec2056ffec5 // vmovdqu $-318(%rip), %ymm0 /* LCPI1_0(%rip) */ + QUAD $0xfffffeda0d6ffec5 // vmovdqu $-294(%rip), %ymm1 /* LCPI1_1(%rip) */ + QUAD $0xfffffef2156ffec5 // vmovdqu $-270(%rip), %ymm2 /* LCPI1_2(%rip) */ + QUAD $0xffffff0a1d6ffec5 // vmovdqu $-246(%rip), %ymm3 /* LCPI1_3(%rip) */ LBB1_9: - LONG $0x6f7dc1c4; BYTE $0x22 // vmovdqa (%r10), %ymm4 - LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 - LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 - LONG $0xeeebd5c5 // vpor %ymm6, %ymm5, %ymm5 - LONG $0xf274ddc5 // vpcmpeqb %ymm2, %ymm4, %ymm6 - LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 - LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 - LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 - LONG $0xc4d7fdc5 // vpmovmskb %ymm4, %eax - CMPL AX, $-1 + LONG $0x206ffdc5 // vmovdqa (%rax), %ymm4 + LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 + LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 + LONG $0xeeebd5c5 // vpor %ymm6, %ymm5, %ymm5 + LONG $0xf274ddc5 // vpcmpeqb %ymm2, %ymm4, %ymm6 + LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 + LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 + LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 + LONG $0xd4d7fdc5 // vpmovmskb %ymm4, %edx + CMPL DX, $-1 JNE LBB1_10 - ADDQ $32, R10 - ADDQ $-32, DX + ADDQ $32, AX ADDQ $-32, SI - CMPQ DX, $31 + CMPQ SI, $31 JA LBB1_9 + MOVQ CX, SI + MOVQ R8, AX -LBB1_12: +LBB1_15: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ DX, $16 - JB LBB1_17 - MOVQ DI, SI - SUBQ R10, SI - QUAD $0xfffffed5056ffac5 // vmovdqu $-299(%rip), %xmm0 /* LCPI1_4(%rip) */ - QUAD $0xfffffedd0d6ffac5 // vmovdqu $-291(%rip), %xmm1 /* LCPI1_5(%rip) */ - QUAD $0xfffffee5156ffac5 // vmovdqu $-283(%rip), %xmm2 /* LCPI1_6(%rip) */ - QUAD $0xfffffeed1d6ffac5 // vmovdqu $-275(%rip), %xmm3 /* LCPI1_7(%rip) */ - -LBB1_14: - LONG $0x6f79c1c4; BYTE $0x22 // vmovdqa (%r10), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 - LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 - LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 - LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 - LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax - CMPW AX, $-1 - JNE LBB1_15 - ADDQ $16, R10 - 
ADDQ $-16, DX - ADDQ $-16, SI - CMPQ DX, $15 - JA LBB1_14 + CMPQ SI, $16 + JB LBB1_19 + LEAQ -16(SI), CX + MOVQ CX, DX + ANDQ $-16, DX + LEAQ 16(DX)(AX*1), R8 + ANDL $15, CX + QUAD $0xfffffec5056ffac5 // vmovdqu $-315(%rip), %xmm0 /* LCPI1_4(%rip) */ + QUAD $0xfffffecd0d6ffac5 // vmovdqu $-307(%rip), %xmm1 /* LCPI1_5(%rip) */ + QUAD $0xfffffed5156ffac5 // vmovdqu $-299(%rip), %xmm2 /* LCPI1_6(%rip) */ + QUAD $0xfffffedd1d6ffac5 // vmovdqu $-291(%rip), %xmm3 /* LCPI1_7(%rip) */ LBB1_17: - TESTQ DX, DX - JE LBB1_24 - LEAQ 0(R10)(DX*1), R8 - XORL AX, AX - MOVQ $4294977024, R9 + LONG $0x206ff9c5 // vmovdqa (%rax), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 + LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 + LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 + LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 + LONG $0xd4d7f9c5 // vpmovmskb %xmm4, %edx + CMPW DX, $-1 + JNE LBB1_18 + ADDQ $16, AX + ADDQ $-16, SI + CMPQ SI, $15 + JA LBB1_17 + MOVQ CX, SI + MOVQ R8, AX LBB1_19: - MOVBLSX 0(R10)(AX*1), SI - CMPL SI, $32 - JA LBB1_21 - BTQ SI, R9 - JAE LBB1_21 - ADDQ $1, AX - CMPQ DX, AX - JNE LBB1_19 - MOVQ R8, R10 - -LBB1_24: - SUBQ DI, R10 + TESTQ SI, SI + JE LBB1_28 + LEAQ 0(AX)(SI*1), R8 + INCQ AX + MOVQ $4294977024, DX -LBB1_25: - MOVQ R10, AX +LBB1_21: + MOVBLSX -1(AX), CX + CMPL CX, $32 + JA LBB1_23 + BTQ CX, DX + JAE LBB1_23 + DECQ SI + INCQ AX + TESTQ SI, SI + JNE LBB1_21 + MOVQ R8, AX + JMP LBB1_28 -LBB1_26: - BYTE $0x5d // popq %rbp +LBB1_10: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + SUBQ DI, AX + NOTL DX + MOVLQSX DX, CX + BSFQ CX, CX + ADDQ CX, AX + BYTE $0x5d // popq %rbp RET -LBB1_10: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - NOTL AX - WORD $0x9848 // cltq - BSFQ AX, AX - SUBQ SI, AX - BYTE $0x5d // popq %rbp +LBB1_5: + ADDQ DX, AX + +LBB1_28: + SUBQ DI, AX + BYTE $0x5d // popq %rbp RET -LBB1_15: - MOVWLZX AX, AX - NOTL AX - BSFL AX, AX - SUBQ SI, AX +LBB1_18: + MOVWLZX DX, CX + SUBQ DI, AX + NOTL CX + BSFL CX, CX + ADDQ CX, AX BYTE $0x5d // popq %rbp RET -LBB1_21: - SUBQ DI, R10 - ADDQ AX, R10 - JMP LBB1_25 +LBB1_23: + NOTQ DI + ADDQ DI, AX + BYTE $0x5d // popq %rbp + RET LCPI2_0: QUAD $0x3030303030303030; QUAD $0x3030303030303030 // .space 16, '0000000000000000' @@ -276,24 +298,24 @@ _f64toa: LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax XORL R9, R9 TESTQ AX, AX - JS LBB2_113 + JS LBB2_108 TESTQ AX, AX - JE LBB2_114 + JE LBB2_109 LBB2_2: MOVQ AX, DX SHRQ $52, DX XORL BX, BX CMPL DX, $2047 - JE LBB2_112 + JE LBB2_107 MOVQ $4503599627370495, DI ANDQ DI, AX - ADDQ $1, DI + INCQ DI ORQ AX, DI LEAL -1023(DX), CX CMPL CX, $52 MOVL R9, -44(BP) - MOVQ R13, -56(BP) + MOVQ R13, -64(BP) JA LBB2_5 MOVL $1075, CX SUBQ DX, CX @@ -309,14 +331,14 @@ LBB2_5: LEAL -1077(DX), CX MOVL $-1076, R11 LONG $0xd9450f44 // cmovnel %ecx, %r11d - MOVQ DI, -64(BP) + MOVQ DI, -72(BP) LEAQ 0(DI*4), R8 TESTQ AX, AX SETNE AX CMPL DX, $2 SETCS R13 ORB AX, R13 - MOVBLZX R13, R14 + MOVBLZX R13, R9 TESTL R11, R11 JS LBB2_12 LONG $0x41e36945; WORD $0x0134; BYTE $0x00 // imull $78913, %r11d, %r12d @@ -328,16 +350,16 @@ LBB2_5: LONG $0x4fdc6941; WORD $0x1293; BYTE $0x00 // imull $1217359, %r12d, %ebx MOVQ R12, AX SHLQ $4, AX - LONG $0x870d8d48; WORD $0x008e; BYTE $0x00 // leaq $36487(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ + LONG $0x070d8d48; WORD $0x0092; BYTE $0x00 // leaq $37383(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ MOVQ R8, DI ORQ $2, DI MOVQ 
0(AX)(CX*1), R10 - MOVQ 8(AX)(CX*1), R9 + MOVQ 8(AX)(CX*1), R14 MOVQ R10, AX MULQ DI MOVQ DX, CX - MOVQ R9, AX - MOVQ R9, -72(BP) + MOVQ R14, AX + MOVQ R14, -56(BP) MULQ DI MOVQ AX, R13 MOVQ DX, SI @@ -350,23 +372,23 @@ LBB2_5: ADDB $61, CX LONG $0xf5ad0f49 // shrdq %cl, %rsi, %r13 SHRQ CX, SI - NOTQ R14 - ADDQ R8, R14 + NOTQ R9 + ADDQ R8, R9 MOVQ R10, AX - MULQ R14 + MULQ R9 MOVQ DX, R15 - MOVQ R9, AX - MULQ R14 - MOVQ DX, R9 + MOVQ R14, AX + MULQ R9 + MOVQ DX, R14 MOVQ AX, BX ADDQ R15, BX - ADCQ $0, R9 - LONG $0xcbad0f4c // shrdq %cl, %r9, %rbx - SHRQ CX, R9 + ADCQ $0, R14 + LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + SHRQ CX, R14 MOVQ R10, AX MULQ R8 MOVQ DX, R11 - MOVQ -72(BP), AX + MOVQ -56(BP), AX MULQ R8 MOVQ DX, R10 ADDQ R11, AX @@ -375,7 +397,7 @@ LBB2_5: SHRQ CX, R10 TESTB $64, CX LONG $0xf5440f49 // cmoveq %r13, %rsi - LONG $0xcb440f4c // cmoveq %rbx, %r9 + LONG $0xf3440f4c // cmoveq %rbx, %r14 LONG $0xd0440f4c // cmoveq %rax, %r10 CMPL R12, $21 JA LBB2_23 @@ -385,14 +407,14 @@ LBB2_5: IMULQ AX, DX CMPQ DX, CX JBE LBB2_17 - TESTB $1, -64(BP) + TESTB $1, -72(BP) JNE LBB2_20 MOVL $-1, DX LBB2_10: - IMULQ AX, R14 - ADDL $1, DX - CMPQ R14, CX + IMULQ AX, R9 + INCL DX + CMPQ R9, CX JBE LBB2_10 CMPL DX, R12 SETCC R13 @@ -415,15 +437,15 @@ LBB2_12: SHRL $19, BX MOVLQSX AX, SI SHLQ $4, SI - LONG $0xb10d8d4c; WORD $0x00a2; BYTE $0x00 // leaq $41649(%rip), %r9 /* _DOUBLE_POW5_SPLIT(%rip) */ + LONG $0x32158d4c; WORD $0x00a6; BYTE $0x00 // leaq $42546(%rip), %r10 /* _DOUBLE_POW5_SPLIT(%rip) */ MOVQ R8, DI ORQ $2, DI - MOVQ 0(SI)(R9*1), R15 - MOVQ R15, AX - MOVQ R15, -72(BP) + MOVQ 0(SI)(R10*1), R14 + MOVQ R14, AX + MOVQ R14, -56(BP) MULQ DI MOVQ DX, CX - MOVQ 8(SI)(R9*1), R10 + MOVQ 8(SI)(R10*1), R10 MOVQ R10, AX MULQ DI MOVQ DX, DI @@ -435,20 +457,20 @@ LBB2_12: ADDB $60, CX LONG $0xfead0f48 // shrdq %cl, %rdi, %rsi SHRQ CX, DI - NOTQ R14 - ADDQ R8, R14 - MOVQ R15, AX - MULQ R14 + NOTQ R9 + ADDQ R8, R9 + MOVQ R14, AX + MULQ R9 MOVQ DX, R15 MOVQ R10, AX - MULQ R14 - MOVQ DX, R9 + MULQ R9 + MOVQ DX, R14 MOVQ AX, BX ADDQ R15, BX - ADCQ $0, R9 - LONG $0xcbad0f4c // shrdq %cl, %r9, %rbx - SHRQ CX, R9 - MOVQ -72(BP), AX + ADCQ $0, R14 + LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + SHRQ CX, R14 + MOVQ -56(BP), AX MULQ R8 MOVQ DX, R15 MOVQ R10, AX @@ -461,19 +483,20 @@ LBB2_12: TESTB $64, CX LONG $0xfe440f48 // cmoveq %rsi, %rdi MOVQ DI, SI - LONG $0xcb440f4c // cmoveq %rbx, %r9 + LONG $0xf3440f4c // cmoveq %rbx, %r14 LONG $0xd0440f4c // cmoveq %rax, %r10 CMPL R12, $1 JA LBB2_15 - MOVQ -64(BP), AX + MOVQ -72(BP), AX ANDL $1, AX TESTQ AX, AX SETEQ CX ANDB CX, R13 SUBQ AX, SI MOVB $1, R15 - MOVL R11, R12 - JMP LBB2_29 + MOVL R11, AX + MOVQ AX, -56(BP) + JMP LBB2_30 LBB2_14: MOVL CX, CX @@ -481,8 +504,8 @@ LBB2_14: XORL R12, R12 MOVL $1, R15 CMPQ DI, $10 - JAE LBB2_45 - JMP LBB2_60 + JAE LBB2_46 + JMP LBB2_61 LBB2_15: CMPL R12, $62 @@ -500,7 +523,7 @@ LBB2_17: LBB2_18: IMULQ AX, R8 - ADDL $1, DX + INCL DX CMPQ R8, CX JBE LBB2_18 CMPL DX, R12 @@ -515,11 +538,13 @@ LBB2_20: LBB2_21: IMULQ AX, DI - ADDL $1, DX + INCL DX CMPQ DI, CX JBE LBB2_21 + XORL AX, AX CMPL DX, R12 - ADCQ $-1, SI + SETCC AX + SUBQ AX, SI LBB2_23: XORL R15, R15 @@ -547,13 +572,13 @@ LBB2_26: MULQ DI MOVQ DX, CX SHRQ $2, CX - MOVQ R9, AX + MOVQ R14, AX SHRQ $2, AX MULQ DI MOVQ DX, SI SHRQ $2, SI CMPQ CX, SI - JBE LBB2_37 + JBE LBB2_39 MOVQ R10, AX SHRQ $2, AX MULQ DI @@ -564,144 +589,150 @@ LBB2_26: CMPL AX, $49 SETHI DI MOVL $2, R11 - MOVQ SI, R9 + MOVQ SI, R14 MOVQ CX, AX MOVQ DX, R10 - MOVQ -56(BP), R13 - JMP LBB2_38 + MOVQ 
-64(BP), R13 + JMP LBB2_40 LBB2_29: - MOVQ $-3689348814741910323, R14 + MOVQ R12, -56(BP) + +LBB2_30: + MOVQ $-3689348814741910323, BX MOVQ SI, AX - MULQ R14 - MOVQ DX, SI - MOVQ R9, AX - MULQ R14 - SHRQ $3, SI + MULQ BX + MOVQ DX, R12 + MOVQ R14, AX + MULQ BX + SHRQ $3, R12 SHRQ $3, DX - XORL DI, DI + XORL SI, SI XORL R11, R11 - CMPQ SI, DX - JBE LBB2_35 + CMPQ R12, DX + JBE LBB2_37 XORL CX, CX -LBB2_31: +LBB2_32: MOVQ DX, R8 LEAL 0(DX)(DX*1), DI MOVQ R10, AX - MULQ R14 - MOVQ DX, BX + MULQ BX + MOVQ DX, R9 LEAL 0(DI)(DI*4), AX - SHRQ $3, BX - LEAL 0(BX)(BX*1), DX - LEAL 0(DX)(DX*4), DI - NEGL DI - ADDB R10, DI - CMPL AX, R9 + SHRQ $3, R9 + LEAL 0(R9)(R9*1), DX + LEAL 0(DX)(DX*4), SI + NEGL SI + ADDB R10, SI + CMPL R14, AX SETEQ AX ANDB AX, R13 TESTB CX, CX SETEQ AX ANDB AX, R15 - ADDL $1, R11 - MOVQ SI, AX - MULQ R14 - MOVQ DX, SI - SHRQ $3, SI + INCL R11 + MOVQ R12, AX + MULQ BX + MOVQ DX, R12 + SHRQ $3, R12 MOVQ R8, AX - MULQ R14 + MULQ BX SHRQ $3, DX - MOVQ BX, R10 - MOVQ R8, R9 - MOVL DI, CX - CMPQ SI, DX - JA LBB2_31 + MOVQ R9, R10 + MOVQ R8, R14 + MOVL SI, CX + CMPQ R12, DX + JA LBB2_32 TESTB R13, R13 - JE LBB2_36 + JE LBB2_38 -LBB2_33: +LBB2_34: MOVQ R8, AX - MULQ R14 + MULQ BX MOVQ DX, CX SHRQ $3, CX LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX - CMPL AX, R8 - MOVQ -64(BP), R10 - JNE LBB2_42 + CMPL R8, AX + MOVQ -72(BP), DI + MOVQ -56(BP), R12 + JNE LBB2_44 + MOVQ BX, R14 -LBB2_34: - MOVQ BX, AX +LBB2_36: + MOVQ R9, AX MULQ R14 - MOVQ DX, R9 + MOVQ DX, R10 MOVQ CX, R8 - SHRQ $3, R9 - LEAL 0(R9)(R9*1), AX - LEAL 0(AX)(AX*4), SI - NEGL SI - ADDB BX, SI - TESTB DI, DI + SHRQ $3, R10 + LEAL 0(R10)(R10*1), AX + LEAL 0(AX)(AX*4), BX + NEGL BX + ADDB R9, BX + TESTB SI, SI SETEQ AX ANDB AX, R15 - ADDL $1, R11 + INCL R11 MOVQ CX, AX MULQ R14 MOVQ DX, CX SHRQ $3, CX LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX - MOVQ R9, BX - MOVL SI, DI - CMPL AX, R8 - JE LBB2_34 - JMP LBB2_43 + MOVQ R10, R9 + MOVL BX, SI + CMPL R8, AX + JE LBB2_36 + JMP LBB2_45 -LBB2_35: - MOVQ R9, R8 - MOVQ R10, BX +LBB2_37: + MOVQ R14, R8 + MOVQ R10, R9 TESTB R13, R13 - JNE LBB2_33 + JNE LBB2_34 -LBB2_36: - MOVL DI, SI - MOVQ BX, R9 - MOVQ -64(BP), R10 - JMP LBB2_43 +LBB2_38: + MOVL SI, BX + MOVQ R9, R10 + MOVQ -72(BP), DI + MOVQ -56(BP), R12 + JMP LBB2_45 -LBB2_37: +LBB2_39: XORL R11, R11 XORL DI, DI - MOVQ -56(BP), R13 + MOVQ -64(BP), R13 MOVQ BX, AX -LBB2_38: +LBB2_40: MOVQ $-3689348814741910323, BX MULQ BX MOVQ DX, CX SHRQ $3, CX - MOVQ R9, AX + MOVQ R14, AX MULQ BX SHRQ $3, DX CMPQ CX, DX - JBE LBB2_41 + JBE LBB2_43 -LBB2_39: +LBB2_41: MOVQ R10, SI - MOVQ DX, R9 + MOVQ DX, R14 MOVQ R10, AX MULQ BX MOVQ DX, R10 SHRQ $3, R10 - ADDL $1, R11 + INCL R11 MOVQ CX, AX MULQ BX MOVQ DX, CX SHRQ $3, CX - MOVQ R9, AX + MOVQ R14, AX MULQ BX SHRQ $3, DX CMPQ CX, DX - JA LBB2_39 + JA LBB2_41 LEAL 0(R10)(R10*1), AX LEAL 0(AX)(AX*4), AX SUBL AX, SI @@ -709,9 +740,9 @@ LBB2_39: CMPL SI, $4 SETHI DI -LBB2_41: +LBB2_43: XORL AX, AX - CMPQ R10, R9 + CMPQ R10, R14 SETEQ AX ORQ DI, AX ADDQ R10, AX @@ -719,129 +750,128 @@ LBB2_41: ADDL R11, R12 MOVL $1, R15 CMPQ DI, $10 - JB LBB2_60 - JMP LBB2_45 + JAE LBB2_46 + JMP LBB2_61 -LBB2_42: - MOVL DI, SI - MOVQ BX, R9 +LBB2_44: + MOVL SI, BX + MOVQ R9, R10 -LBB2_43: +LBB2_45: TESTB R13, R13 - SETEQ AX + SETEQ SI TESTB R15, R15 - SETNE CX - CMPB SI, $5 + SETNE AX + CMPB BX, $5 SETEQ DX - TESTB $1, R9 - SETEQ BX - ANDB CX, BX - ANDB DX, BX - CMPQ R9, R8 + TESTB $1, R10 SETEQ CX - ORB AX, R10 - ANDB CX, R10 - CMPB SI, $4 + ANDB AX, CX + ANDB DX, CX + CMPQ R10, R8 + SETEQ DX + ORB SI, DI + CMPB BX, $4 
SETHI AX - XORB BX, AX - ORB R10, AX - MOVBLZX AX, DI - ADDQ R9, DI - MOVQ -56(BP), R13 + XORB CX, AX + ANDB DX, DI + ORB AX, DI + MOVBLZX DI, DI + ADDQ R10, DI + MOVQ -64(BP), R13 ADDL R11, R12 MOVL $1, R15 CMPQ DI, $10 - JB LBB2_60 + JB LBB2_61 -LBB2_45: +LBB2_46: MOVL $2, R15 CMPQ DI, $100 - JB LBB2_60 + JB LBB2_61 MOVL $3, R15 CMPQ DI, $1000 - JB LBB2_60 + JB LBB2_61 MOVL $4, R15 CMPQ DI, $10000 - JB LBB2_60 + JB LBB2_61 MOVL $5, R15 CMPQ DI, $100000 - JB LBB2_60 + JB LBB2_61 MOVL $6, R15 CMPQ DI, $1000000 - JB LBB2_60 + JB LBB2_61 MOVL $7, R15 CMPQ DI, $10000000 - JB LBB2_60 + JB LBB2_61 MOVL $8, R15 CMPQ DI, $100000000 - JB LBB2_60 + JB LBB2_61 MOVL $9, R15 CMPQ DI, $1000000000 - JB LBB2_60 + JB LBB2_61 MOVQ $8589934464, AX ADDQ $1410065536, AX MOVL $10, R15 CMPQ DI, AX - JB LBB2_60 + JB LBB2_61 MOVQ DI, AX SHRQ $11, AX MOVL $11, R15 CMPQ AX, $48828125 - JB LBB2_60 + JB LBB2_61 MOVQ DI, AX SHRQ $12, AX MOVL $12, R15 CMPQ AX, $244140625 - JB LBB2_60 + JB LBB2_61 MOVQ DI, AX SHRQ $13, AX MOVL $13, R15 CMPQ AX, $1220703125 - JB LBB2_60 + JB LBB2_61 MOVL $14, R15 MOVQ $100000000000000, AX CMPQ DI, AX - JB LBB2_60 + JB LBB2_61 MOVL $15, R15 MOVQ $1000000000000000, AX CMPQ DI, AX - JB LBB2_60 + JB LBB2_61 MOVQ $10000000000000000, AX CMPQ DI, AX MOVL $17, R15 SBBL $0, R15 -LBB2_60: +LBB2_61: LEAL 0(R15)(R12*1), R14 - LEAL 0(R15)(R12*1), AX - ADDL $5, AX + LEAL 5(R15)(R12*1), AX CMPL AX, $27 - JB LBB2_66 + JB LBB2_67 LEAQ 1(R13), BX MOVQ BX, SI MOVL R15, DX - LONG $0x0057e2e8; BYTE $0x00 // callq _print_mantissa + LONG $0x005b99e8; BYTE $0x00 // callq _print_mantissa MOVB 1(R13), AX MOVB AX, 0(R13) MOVL $1, AX CMPL R15, $2 - JB LBB2_63 + JB LBB2_64 MOVB $46, 0(BX) - ADDL $1, R15 + INCL R15 MOVL R15, AX -LBB2_63: +LBB2_64: MOVL AX, BX MOVB $101, 0(R13)(BX*1) - ADDQ $1, BX + INCQ BX TESTL R14, R14 - JLE LBB2_68 - ADDL $-1, R14 + JLE LBB2_69 + DECL R14 MOVL -44(BP), R9 CMPL R14, $100 - JL LBB2_69 + JL LBB2_70 -LBB2_65: +LBB2_66: MOVL R14, AX MOVL $3435973837, CX IMULQ AX, CX @@ -849,27 +879,28 @@ LBB2_65: LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX SUBL AX, R14 - LONG $0x90058d48; WORD $0x00b1; BYTE $0x00 // leaq $45456(%rip), %rax /* _Digits(%rip) */ + LONG $0x04058d48; WORD $0x00b5; BYTE $0x00 // leaq $46340(%rip), %rax /* _Digits(%rip) */ MOVWLZX 0(AX)(CX*2), AX MOVL BX, CX MOVW AX, 0(R13)(CX*1) ORB $48, R14 - MOVB R14, 2(R13)(CX*1) + MOVLQSX BX, AX + MOVB R14, 2(AX)(R13*1) ADDL $3, BX - JMP LBB2_112 + JMP LBB2_107 -LBB2_66: +LBB2_67: TESTL R14, R14 - JLE LBB2_71 + JLE LBB2_72 MOVL R12, R13 SARL $31, R13 ANDL R14, R13 XORL BX, BX TESTL R12, R12 LONG $0xe3480f44 // cmovsl %ebx, %r12d - JMP LBB2_73 + JMP LBB2_74 -LBB2_68: +LBB2_69: ADDL $2, AX MOVB $45, 0(R13)(BX*1) MOVL $1, CX @@ -878,192 +909,182 @@ LBB2_68: MOVL AX, BX MOVL -44(BP), R9 CMPL R14, $100 - JGE LBB2_65 + JGE LBB2_66 -LBB2_69: +LBB2_70: CMPL R14, $10 - JL LBB2_84 + JL LBB2_85 MOVLQSX R14, AX - LONG $0x250d8d48; WORD $0x00b1; BYTE $0x00 // leaq $45349(%rip), %rcx /* _Digits(%rip) */ + LONG $0x960d8d48; WORD $0x00b4; BYTE $0x00 // leaq $46230(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVL BX, CX MOVW AX, 0(R13)(CX*1) ADDL $2, BX - JMP LBB2_112 + JMP LBB2_107 -LBB2_71: +LBB2_72: MOVW $11824, 0(R13) TESTL R14, R14 - JS LBB2_85 + JS LBB2_86 XORL R13, R13 MOVL $2, BX XORL R12, R12 -LBB2_73: +LBB2_74: MOVL BX, SI - ADDQ -56(BP), SI + ADDQ -64(BP), SI MOVL R15, DX - LONG $0x0056dee8; BYTE $0x00 // callq _print_mantissa + LONG $0x005a95e8; BYTE $0x00 // callq _print_mantissa TESTL R13, R13 - JE LBB2_77 - LEAL 0(BX)(R13*1), AX 
+ JE LBB2_78 + LEAL 0(R13)(BX*1), AX CMPL R13, R15 - JGE LBB2_79 + JGE LBB2_80 LEAL 0(BX)(R15*1), CX MOVL AX, AX - MOVQ -56(BP), R10 + MOVQ -64(BP), R10 -LBB2_76: +LBB2_77: MOVBLZX -1(R10)(CX*1), DX MOVB DX, 0(R10)(CX*1) - LEAQ -1(CX), DX - MOVQ DX, CX - CMPQ DX, AX - JG LBB2_76 - JMP LBB2_80 + DECQ CX + CMPQ CX, AX + JG LBB2_77 + JMP LBB2_81 -LBB2_77: - MOVQ -56(BP), R10 +LBB2_78: + MOVQ -64(BP), R10 ADDL R15, BX TESTL R12, R12 - JNE LBB2_81 - JMP LBB2_111 + JNE LBB2_82 + JMP LBB2_106 -LBB2_79: +LBB2_80: MOVL AX, AX - MOVQ -56(BP), R10 + MOVQ -64(BP), R10 -LBB2_80: +LBB2_81: MOVB $46, 0(R10)(AX*1) ORL $1, BX ADDL R15, BX TESTL R12, R12 - JE LBB2_111 + JE LBB2_106 -LBB2_81: +LBB2_82: MOVLQSX BX, BX LEAL -1(R12), R8 XORL AX, AX CMPL R8, $127 MOVL -44(BP), R9 - JB LBB2_94 - ADDQ $1, R8 + JB LBB2_95 + INCQ R8 MOVQ R8, AX ANDQ $-128, AX - LEAQ -128(AX), CX - MOVQ CX, SI + LEAQ -128(AX), DX + MOVQ DX, SI SHRQ $7, SI - ADDQ $1, SI - MOVL SI, DX - ANDL $3, DX - CMPQ CX, $384 - JAE LBB2_88 - XORL DI, DI - JMP LBB2_90 - -LBB2_84: + INCQ SI + MOVL SI, CX + ANDL $3, CX + CMPQ DX, $384 + JAE LBB2_89 + XORL SI, SI + JMP LBB2_91 + +LBB2_85: ADDB $48, R14 MOVL BX, AX - ADDL $1, BX + INCL BX MOVB R14, 0(R13)(AX*1) - JMP LBB2_112 + JMP LBB2_107 -LBB2_85: +LBB2_86: MOVL $2, BX SUBL R14, BX LEAQ -2(BX), R8 - MOVL $2, DX - CMPQ R8, $4 - JB LBB2_109 + MOVL $2, AX CMPQ R8, $128 - JAE LBB2_96 - XORL CX, CX - JMP LBB2_105 - -LBB2_88: - LEAQ 0(BX)(R10*1), CX - ADDQ $480, CX - ANDQ $-4, SI - NEGQ SI - XORL DI, DI - QUAD $0xfffff68e056ffec5 // vmovdqu $-2418(%rip), %ymm0 /* LCPI2_0(%rip) */ + JB LBB2_104 + MOVQ R8, AX + ANDQ $-128, AX + LEAQ -128(AX), DX + MOVQ DX, SI + SHRQ $7, SI + INCQ SI + MOVL SI, CX + ANDL $3, CX + CMPQ DX, $384 + JAE LBB2_97 + XORL SI, SI + JMP LBB2_99 LBB2_89: - QUAD $0xfffe2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rcx,%rdi) - QUAD $0xfffe4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rcx,%rdi) - QUAD $0xfffe6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rcx,%rdi) - QUAD $0xfffe8039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rcx,%rdi) - QUAD $0xfffea039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rcx,%rdi) - QUAD $0xfffec039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rcx,%rdi) - QUAD $0xfffee039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rcx,%rdi) - QUAD $0xffff0039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rcx,%rdi) - QUAD $0xffff2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rcx,%rdi) - QUAD $0xffff4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rcx,%rdi) - QUAD $0xffff6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rcx,%rdi) - LONG $0x447ffec5; WORD $0x8039 // vmovdqu %ymm0, $-128(%rcx,%rdi) - LONG $0x447ffec5; WORD $0xa039 // vmovdqu %ymm0, $-96(%rcx,%rdi) - LONG $0x447ffec5; WORD $0xc039 // vmovdqu %ymm0, $-64(%rcx,%rdi) - LONG $0x447ffec5; WORD $0xe039 // vmovdqu %ymm0, $-32(%rcx,%rdi) - LONG $0x047ffec5; BYTE $0x39 // vmovdqu %ymm0, (%rcx,%rdi) - ADDQ $512, DI - ADDQ $4, SI - JNE LBB2_89 + LEAQ 480(BX)(R10*1), DI + MOVQ CX, DX + SUBQ SI, DX + XORL SI, SI + QUAD $0xfffff66d056ffec5 // vmovdqu $-2451(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_90: - TESTQ DX, DX - JE LBB2_93 - ADDQ BX, DI - LEAQ 0(R10)(DI*1), CX - ADDQ $96, CX - SHLQ $7, DX - XORL SI, SI - QUAD $0xfffff5df056ffec5 // vmovdqu $-2593(%rip), %ymm0 /* LCPI2_0(%rip) */ + QUAD $0xfffe2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rdi,%rsi) + QUAD $0xfffe4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rdi,%rsi) + QUAD $0xfffe6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rdi,%rsi) + QUAD 
$0xfffe8037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rdi,%rsi) + QUAD $0xfffea037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rdi,%rsi) + QUAD $0xfffec037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rdi,%rsi) + QUAD $0xfffee037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rdi,%rsi) + QUAD $0xffff0037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rdi,%rsi) + QUAD $0xffff2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rdi,%rsi) + QUAD $0xffff4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rdi,%rsi) + QUAD $0xffff6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rdi,%rsi) + LONG $0x447ffec5; WORD $0x8037 // vmovdqu %ymm0, $-128(%rdi,%rsi) + LONG $0x447ffec5; WORD $0xa037 // vmovdqu %ymm0, $-96(%rdi,%rsi) + LONG $0x447ffec5; WORD $0xc037 // vmovdqu %ymm0, $-64(%rdi,%rsi) + LONG $0x447ffec5; WORD $0xe037 // vmovdqu %ymm0, $-32(%rdi,%rsi) + LONG $0x047ffec5; BYTE $0x37 // vmovdqu %ymm0, (%rdi,%rsi) + ADDQ $512, SI + ADDQ $4, DX + JNE LBB2_90 -LBB2_92: - LONG $0x447ffec5; WORD $0xa031 // vmovdqu %ymm0, $-96(%rcx,%rsi) - LONG $0x447ffec5; WORD $0xc031 // vmovdqu %ymm0, $-64(%rcx,%rsi) - LONG $0x447ffec5; WORD $0xe031 // vmovdqu %ymm0, $-32(%rcx,%rsi) - LONG $0x047ffec5; BYTE $0x31 // vmovdqu %ymm0, (%rcx,%rsi) - SUBQ $-128, SI - CMPQ DX, SI - JNE LBB2_92 +LBB2_91: + TESTQ CX, CX + JE LBB2_94 + ADDQ BX, SI + LEAQ 96(R10)(SI*1), DX + NEGQ CX + QUAD $0xfffff5c4056ffec5 // vmovdqu $-2620(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_93: + LONG $0x427ffec5; BYTE $0xa0 // vmovdqu %ymm0, $-96(%rdx) + LONG $0x427ffec5; BYTE $0xc0 // vmovdqu %ymm0, $-64(%rdx) + LONG $0x427ffec5; BYTE $0xe0 // vmovdqu %ymm0, $-32(%rdx) + LONG $0x027ffec5 // vmovdqu %ymm0, (%rdx) + SUBQ $-128, DX + INCQ CX + JNE LBB2_93 + +LBB2_94: ADDQ AX, BX CMPQ R8, AX - JE LBB2_112 + JE LBB2_107 -LBB2_94: +LBB2_95: SUBL AX, R12 -LBB2_95: +LBB2_96: MOVB $48, 0(R10)(BX*1) - ADDQ $1, BX - ADDL $-1, R12 - JNE LBB2_95 - JMP LBB2_112 + INCQ BX + DECL R12 + JNE LBB2_96 + JMP LBB2_107 -LBB2_96: - MOVQ R8, CX - ANDQ $-128, CX - LEAQ -128(CX), SI - MOVQ SI, DX - SHRQ $7, DX - ADDQ $1, DX - MOVL DX, AX - ANDL $3, AX - CMPQ SI, $384 - JAE LBB2_98 +LBB2_97: + MOVQ CX, DX + SUBQ SI, DX XORL SI, SI - JMP LBB2_100 + QUAD $0xfffff577056ffec5 // vmovdqu $-2697(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_98: - ANDQ $-4, DX - NEGQ DX - XORL SI, SI - QUAD $0xfffff560056ffec5 // vmovdqu $-2720(%rip), %ymm0 /* LCPI2_0(%rip) */ - -LBB2_99: LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x02 // vmovdqu %ymm0, $2(%r13,%rsi) LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x22 // vmovdqu %ymm0, $34(%r13,%rsi) LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x42 // vmovdqu %ymm0, $66(%r13,%rsi) @@ -1082,15 +1103,15 @@ LBB2_99: QUAD $0x01e235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $482(%r13,%rsi) ADDQ $512, SI ADDQ $4, DX - JNE LBB2_99 + JNE LBB2_98 -LBB2_100: - TESTQ AX, AX - JE LBB2_103 - NEGQ AX - QUAD $0xfffff4ab056ffec5 // vmovdqu $-2901(%rip), %ymm0 /* LCPI2_0(%rip) */ +LBB2_99: + TESTQ CX, CX + JE LBB2_102 + NEGQ CX + QUAD $0xfffff4c2056ffec5 // vmovdqu $-2878(%rip), %ymm0 /* LCPI2_0(%rip) */ -LBB2_102: +LBB2_101: MOVQ SI, DX ORQ $2, DX LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x00 // vmovdqu %ymm0, (%r13,%rdx) @@ -1098,70 +1119,51 @@ LBB2_102: LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x40 // vmovdqu %ymm0, $64(%r13,%rdx) LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x60 // vmovdqu %ymm0, $96(%r13,%rdx) SUBQ $-128, SI - INCQ AX - JNE LBB2_102 + INCQ CX + JNE LBB2_101 -LBB2_103: - CMPQ R8, CX - JE LBB2_110 - TESTB $124, R8 - JE LBB2_108 +LBB2_102: + CMPQ R8, AX + JE LBB2_105 + ORQ $2, AX + +LBB2_104: + 
MOVB $48, 0(R13)(AX*1) + INCQ AX + CMPQ BX, AX + JNE LBB2_104 LBB2_105: - MOVQ R8, SI - ANDQ $-4, SI - LEAQ 2(SI), DX + ADDQ BX, R13 + MOVQ R13, SI + MOVL R15, DX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x005783e8; BYTE $0x00 // callq _print_mantissa + ADDL BX, R15 + MOVL R15, BX LBB2_106: - MOVL $808464432, 2(R13)(CX*1) - ADDQ $4, CX - CMPQ SI, CX - JNE LBB2_106 - CMPQ R8, SI - JNE LBB2_109 - JMP LBB2_110 - -LBB2_108: - ORQ $2, CX - MOVQ CX, DX - -LBB2_109: - MOVB $48, 0(R13)(DX*1) - ADDQ $1, DX - CMPQ BX, DX - JNE LBB2_109 - -LBB2_110: - ADDQ BX, R13 - MOVQ R13, SI - MOVL R15, DX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x005379e8; BYTE $0x00 // callq _print_mantissa - ADDL BX, R15 - MOVL R15, BX - -LBB2_111: MOVL -44(BP), R9 -LBB2_112: +LBB2_107: ADDL R9, BX - JMP LBB2_115 + JMP LBB2_110 -LBB2_113: +LBB2_108: MOVQ $9223372036854775807, CX ANDQ CX, AX MOVB $45, 0(R13) - ADDQ $1, R13 + INCQ R13 MOVL $1, R9 TESTQ AX, AX JNE LBB2_2 -LBB2_114: +LBB2_109: MOVB $48, 0(R13) - ADDL $1, R9 + INCL R9 MOVL R9, BX -LBB2_115: +LBB2_110: MOVL BX, AX ADDQ $40, SP BYTE $0x5b // popq %rbx @@ -1182,10 +1184,10 @@ LBB3_1: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp MOVB $45, 0(DI) - ADDQ $1, DI + INCQ DI NEGQ SI - LONG $0x000045e8; BYTE $0x00 // callq _u64toa - ADDL $1, AX + LONG $0x000044e8; BYTE $0x00 // callq _u64toa + INCL AX BYTE $0x5d // popq %rbp RET @@ -1229,7 +1231,7 @@ _u64toa: ADDQ AX, AX CMPL SI, $1000 JB LBB4_3 - LONG $0x900d8d48; WORD $0x00ac; BYTE $0x00 // leaq $44176(%rip), %rcx /* _Digits(%rip) */ + LONG $0x580d8d48; WORD $0x00b0; BYTE $0x00 // leaq $45144(%rip), %rcx /* _Digits(%rip) */ MOVB 0(DX)(CX*1), CX MOVB CX, 0(DI) MOVL $1, CX @@ -1243,26 +1245,26 @@ LBB4_3: LBB4_4: MOVWLZX DX, DX ORQ $1, DX - LONG $0x6f358d48; WORD $0x00ac; BYTE $0x00 // leaq $44143(%rip), %rsi /* _Digits(%rip) */ + LONG $0x37358d48; WORD $0x00b0; BYTE $0x00 // leaq $45111(%rip), %rsi /* _Digits(%rip) */ MOVB 0(DX)(SI*1), DX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB DX, 0(DI)(SI*1) LBB4_6: - LONG $0x5d158d48; WORD $0x00ac; BYTE $0x00 // leaq $44125(%rip), %rdx /* _Digits(%rip) */ + LONG $0x26158d48; WORD $0x00b0; BYTE $0x00 // leaq $45094(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), DX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB DX, 0(DI)(SI*1) LBB4_7: MOVWLZX AX, AX ORQ $1, AX - LONG $0x44158d48; WORD $0x00ac; BYTE $0x00 // leaq $44100(%rip), %rdx /* _Digits(%rip) */ + LONG $0x0e158d48; WORD $0x00b0; BYTE $0x00 // leaq $45070(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), AX MOVL CX, DX - ADDL $1, CX + INCL CX MOVB AX, 0(DI)(DX*1) MOVL CX, AX BYTE $0x5d // popq %rbp @@ -1306,7 +1308,7 @@ LBB4_8: ADDQ R11, R11 CMPL SI, $10000000 JB LBB4_11 - LONG $0xac058d48; WORD $0x00ab; BYTE $0x00 // leaq $43948(%rip), %rax /* _Digits(%rip) */ + LONG $0x77058d48; WORD $0x00af; BYTE $0x00 // leaq $44919(%rip), %rax /* _Digits(%rip) */ MOVB 0(R10)(AX*1), AX MOVB AX, 0(DI) MOVL $1, CX @@ -1320,39 +1322,39 @@ LBB4_11: LBB4_12: MOVL R10, AX ORQ $1, AX - LONG $0x87358d48; WORD $0x00ab; BYTE $0x00 // leaq $43911(%rip), %rsi /* _Digits(%rip) */ + LONG $0x52358d48; WORD $0x00af; BYTE $0x00 // leaq $44882(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB AX, 0(DI)(SI*1) LBB4_14: - LONG $0x75058d48; WORD $0x00ab; BYTE $0x00 // leaq $43893(%rip), %rax /* _Digits(%rip) */ + LONG $0x41058d48; WORD $0x00af; BYTE $0x00 // leaq $44865(%rip), %rax /* _Digits(%rip) */ MOVB 0(R9)(AX*1), AX MOVL CX, SI - ADDL $1, CX + INCL CX MOVB AX, 0(DI)(SI*1) LBB4_15: 
MOVWLZX R9, AX ORQ $1, AX - LONG $0x5a358d48; WORD $0x00ab; BYTE $0x00 // leaq $43866(%rip), %rsi /* _Digits(%rip) */ + LONG $0x27358d48; WORD $0x00af; BYTE $0x00 // leaq $44839(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, DX - MOVB AX, 0(DI)(DX*1) + MOVB AX, 0(DX)(DI*1) MOVB 0(R8)(SI*1), AX - MOVB AX, 1(DI)(DX*1) + MOVB AX, 1(DX)(DI*1) MOVWLZX R8, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX - MOVB AX, 2(DI)(DX*1) + MOVB AX, 2(DX)(DI*1) MOVB 0(R11)(SI*1), AX - MOVB AX, 3(DI)(DX*1) + MOVB AX, 3(DX)(DI*1) MOVWLZX R11, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX ADDL $5, CX - MOVB AX, 4(DI)(DX*1) + MOVB AX, 4(DX)(DI*1) MOVL CX, AX BYTE $0x5d // popq %rbp RET @@ -1374,7 +1376,7 @@ LBB4_16: LONG $0xe100c269; WORD $0x05f5 // imull $100000000, %edx, %eax SUBL AX, SI LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffdc60d6ffac5 // vmovdqu $-570(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffdcb0d6ffac5 // vmovdqu $-565(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1385,11 +1387,11 @@ LBB4_16: LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 - QUAD $0xfffdbf155979e2c4; BYTE $0xff // vpbroadcastq $-577(%rip), %xmm2 /* LCPI4_1(%rip) */ - QUAD $0xfffdbe255979e2c4; BYTE $0xff // vpbroadcastq $-578(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffdc4155979e2c4; BYTE $0xff // vpbroadcastq $-572(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffdc3255979e2c4; BYTE $0xff // vpbroadcastq $-573(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffd862d6ffac5 // vmovdqu $-634(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffd8b2d6ffac5 // vmovdqu $-629(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1408,17 +1410,17 @@ LBB4_16: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffd400dfcf9c5 // vpaddb $-704(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ + QUAD $0xfffffd450dfcf9c5 // vpaddb $-699(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ LONG $0xd2efe9c5 // vpxor %xmm2, %xmm2, %xmm2 LONG $0xc274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - NOTL AX ORL $32768, AX + XORL $-32769, AX BSFL AX, AX MOVL $16, CX SUBL AX, CX SHLQ $4, AX - LONG $0xd0158d48; WORD $0x00aa; BYTE $0x00 // leaq $43728(%rip), %rdx /* _VecShiftShuffles(%rip) */ + LONG $0x9a158d48; WORD $0x00ae; BYTE $0x00 // leaq $44698(%rip), %rdx /* _VecShiftShuffles(%rip) */ LONG $0x0071e2c4; WORD $0x1004 // vpshufb (%rax,%rdx), %xmm1, %xmm0 LONG $0x077ffac5 // vmovdqu %xmm0, (%rdi) MOVL CX, AX @@ -1444,7 +1446,7 @@ LBB4_20: CMPL DX, $99 JA LBB4_22 MOVL DX, AX - LONG $0xb30d8d48; WORD $0x00a9; BYTE $0x00 // leaq $43443(%rip), %rcx /* _Digits(%rip) */ + LONG $0x7d0d8d48; WORD $0x00ad; BYTE $0x00 // leaq $44413(%rip), %rcx /* _Digits(%rip) */ MOVB 0(CX)(AX*2), DX MOVB 1(CX)(AX*2), AX MOVB DX, 0(DI) @@ -1469,7 +1471,7 @@ LBB4_22: WORD $0xc96b; BYTE $0x64 // imull $100, %ecx, %ecx SUBL CX, AX MOVWLZX AX, AX - LONG $0x620d8d48; WORD $0x00a9; BYTE $0x00 // leaq $43362(%rip), %rcx /* _Digits(%rip) */ + LONG $0x2c0d8d48; WORD $0x00ad; BYTE $0x00 // leaq $44332(%rip), 
%rcx /* _Digits(%rip) */ MOVB 0(CX)(AX*2), DX MOVB 1(CX)(AX*2), AX MOVB DX, 1(DI) @@ -1481,7 +1483,7 @@ LBB4_24: WORD $0xc86b; BYTE $0x64 // imull $100, %eax, %ecx SUBL CX, DX MOVWLZX AX, AX - LONG $0x3f058d4c; WORD $0x00a9; BYTE $0x00 // leaq $43327(%rip), %r8 /* _Digits(%rip) */ + LONG $0x09058d4c; WORD $0x00ad; BYTE $0x00 // leaq $44297(%rip), %r8 /* _Digits(%rip) */ MOVB 0(R8)(AX*2), CX MOVB 1(R8)(AX*2), AX MOVB CX, 0(DI) @@ -1502,7 +1504,7 @@ LBB4_25: MULQ DX SHRQ $26, DX LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffbe20d6ffac5 // vmovdqu $-1054(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffbe40d6ffac5 // vmovdqu $-1052(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1512,12 +1514,12 @@ LBB4_25: LONG $0xc061e9c5 // vpunpcklwd %xmm0, %xmm2, %xmm0 LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 - QUAD $0xfffbe0155979e2c4; BYTE $0xff // vpbroadcastq $-1056(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffbe2155979e2c4; BYTE $0xff // vpbroadcastq $-1054(%rip), %xmm2 /* LCPI4_1(%rip) */ LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 - QUAD $0xfffbd6255979e2c4; BYTE $0xff // vpbroadcastq $-1066(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffbd8255979e2c4; BYTE $0xff // vpbroadcastq $-1064(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffba22d6ffac5 // vmovdqu $-1118(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffba42d6ffac5 // vmovdqu $-1116(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1538,7 +1540,7 @@ LBB4_25: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffb5405fcf9c5 // vpaddb $-1196(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ + QUAD $0xfffffb5605fcf9c5 // vpaddb $-1194(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ MOVL CX, AX LONG $0x047ffac5; BYTE $0x07 // vmovdqu %xmm0, (%rdi,%rax) ORL $16, CX @@ -1575,130 +1577,120 @@ _quote: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $24, SP - MOVQ CX, R11 + BYTE $0x50 // pushq %rax + MOVQ CX, R15 MOVQ SI, R14 - MOVQ 0(CX), R10 + MOVQ 0(CX), R9 TESTB $1, R8 - LONG $0xd9058d48; WORD $0x00a8; BYTE $0x00 // leaq $43225(%rip), %rax /* __SingleQuoteTab(%rip) */ - LONG $0xd2058d4c; WORD $0x00b8; BYTE $0x00 // leaq $47314(%rip), %r8 /* __DoubleQuoteTab(%rip) */ + LONG $0xa6058d48; WORD $0x00ac; BYTE $0x00 // leaq $44198(%rip), %rax /* __SingleQuoteTab(%rip) */ + LONG $0x9f058d4c; WORD $0x00bc; BYTE $0x00 // leaq $48287(%rip), %r8 /* __DoubleQuoteTab(%rip) */ LONG $0xc0440f4c // cmoveq %rax, %r8 LEAQ 0(SI*8), AX - CMPQ R10, AX - JGE LBB5_1 + CMPQ R9, AX + JGE LBB5_86 MOVQ DX, R12 - MOVQ DI, R9 + MOVQ DI, R11 TESTQ R14, R14 - JE LBB5_130 - QUAD $0xffffff18256ffec5 // vmovdqu $-232(%rip), %ymm4 /* LCPI5_0(%rip) */ - QUAD $0xffffff302d6ffec5 // vmovdqu $-208(%rip), %ymm5 /* LCPI5_1(%rip) */ - QUAD $0xffffff48356ffec5 // vmovdqu $-184(%rip), %ymm6 /* LCPI5_2(%rip) */ + JE LBB5_118 + QUAD $0xffffff1b256ffec5 // vmovdqu $-229(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xffffff332d6ffec5 // vmovdqu $-205(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xffffff4b356ffec5 // vmovdqu 
$-181(%rip), %ymm6 /* LCPI5_2(%rip) */ LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - MOVQ DI, AX - MOVQ DX, -64(BP) + MOVQ DI, R11 MOVQ DX, R12 + MOVQ DX, -48(BP) -LBB5_10: - MOVQ AX, R9 +LBB5_3: CMPQ R14, $31 SETGT CX - MOVQ R10, BX + MOVQ R9, R10 MOVQ R12, SI - MOVQ R14, R15 - MOVQ AX, R13 - CMPQ R10, $32 - JL LBB5_43 + MOVQ R14, AX + MOVQ R11, R13 + CMPQ R9, $32 + JL LBB5_9 CMPQ R14, $32 - JL LBB5_43 - XORL SI, SI + JL LBB5_9 + MOVQ R11, R13 MOVQ R14, AX - MOVQ R10, DX + MOVQ R12, SI + MOVQ R9, BX -LBB5_13: - LONG $0x6f7ec1c4; WORD $0x3104 // vmovdqu (%r9,%rsi), %ymm0 +LBB5_6: + LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 LONG $0xc864ddc5 // vpcmpgtb %ymm0, %ymm4, %ymm1 LONG $0xd574fdc5 // vpcmpeqb %ymm5, %ymm0, %ymm2 LONG $0xde74fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm3 LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 - LONG $0x7f7ec1c4; WORD $0x3404 // vmovdqu %ymm0, (%r12,%rsi) + LONG $0x067ffec5 // vmovdqu %ymm0, (%rsi) LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 - LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 + LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 LONG $0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx TESTL CX, CX - JNE LBB5_14 - LEAQ -32(AX), R15 - LEAQ -32(DX), BX + JNE LBB5_19 + ADDQ $32, R13 + ADDQ $32, SI + LEAQ -32(BX), R10 CMPQ AX, $63 SETGT CX - ADDQ $32, SI CMPQ AX, $64 - JL LBB5_42 - MOVQ R15, AX - CMPQ DX, $63 - MOVQ BX, DX - JG LBB5_13 - -LBB5_42: - LEAQ 0(R9)(SI*1), R13 - ADDQ R12, SI + LEAQ -32(AX), AX + JL LBB5_9 + CMPQ BX, $63 + MOVQ R10, BX + JG LBB5_6 -LBB5_43: +LBB5_9: TESTB CX, CX - JE LBB5_69 - MOVQ R14, DX + JE LBB5_13 LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 LONG $0xc864ddc5 // vpcmpgtb %ymm0, %ymm4, %ymm1 LONG $0xd574fdc5 // vpcmpeqb %ymm5, %ymm0, %ymm2 LONG $0xde74fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm3 LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 - LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 + LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 LONG $0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 LONG $0xc0d7fdc5 // vpmovmskb %ymm0, %eax MOVQ $4294967296, CX ORQ CX, AX - BSFQ AX, R14 + BSFQ AX, CX LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0x16f9e3c4; WORD $0x01c0 // vpextrq $1, %xmm0, %rax - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - CMPQ R14, BX - JLE LBB5_45 - CMPQ BX, $16 - MOVQ $12884901889, R15 - JB LBB5_58 - MOVQ CX, 0(SI) + LONG $0x7ef9e1c4; BYTE $0xc3 // vmovq %xmm0, %rbx + CMPQ CX, R10 + JLE LBB5_20 + CMPQ R10, $16 + JB LBB5_23 + MOVQ BX, 0(SI) MOVQ AX, 8(SI) - LEAQ 16(R13), AX + LEAQ 16(R13), CX ADDQ $16, SI - LEAQ -16(BX), CX - MOVQ DX, R14 - CMPQ CX, $8 - JAE LBB5_61 - JMP LBB5_62 + LEAQ -16(R10), AX + CMPQ AX, $8 + JAE LBB5_24 + JMP LBB5_25 -LBB5_69: +LBB5_13: + MOVQ R15, DX WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ R15, $15 - SETGT DX - CMPQ BX, $16 - MOVQ R11, -56(BP) - MOVQ R14, -48(BP) - JL LBB5_70 - CMPQ R15, $16 - QUAD $0xfffffe103d6ffac5 // vmovdqu $-496(%rip), %xmm7 /* LCPI5_3(%rip) */ - QUAD $0xfffffe180d6f7ac5 // vmovdqu $-488(%rip), %xmm9 /* LCPI5_4(%rip) */ - QUAD $0xfffffe20156f7ac5 // vmovdqu $-480(%rip), %xmm10 /* LCPI5_5(%rip) */ + CMPQ AX, $15 + SETGT R15 + CMPQ R10, $16 + JL LBB5_30 + CMPQ AX, $16 + QUAD $0xfffffe323d6ffac5 // vmovdqu $-462(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffffe3a0d6f7ac5 // vmovdqu $-454(%rip), %xmm9 /* LCPI5_4(%rip) */ + QUAD $0xfffffe42156f7ac5 // vmovdqu $-446(%rip), %xmm10 /* LCPI5_5(%rip) */ LONG $0x762141c4; BYTE $0xdb // 
vpcmpeqd %xmm11, %xmm11, %xmm11 - JL LBB5_72 - MOVQ R9, CX - SUBQ R13, CX - QUAD $0xfffffd87256ffec5 // vmovdqu $-633(%rip), %ymm4 /* LCPI5_0(%rip) */ - QUAD $0xfffffd9f2d6ffec5 // vmovdqu $-609(%rip), %ymm5 /* LCPI5_1(%rip) */ - QUAD $0xfffffdb7356ffec5 // vmovdqu $-585(%rip), %ymm6 /* LCPI5_2(%rip) */ + JL LBB5_35 + QUAD $0xfffffdaf256ffec5 // vmovdqu $-593(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xfffffdc72d6ffec5 // vmovdqu $-569(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xfffffddf356ffec5 // vmovdqu $-545(%rip), %ymm6 /* LCPI5_2(%rip) */ LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 -LBB5_74: +LBB5_16: LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 LONG $0xd074b1c5 // vpcmpeqb %xmm0, %xmm9, %xmm2 @@ -1706,481 +1698,421 @@ LBB5_74: LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 LONG $0x067ffac5 // vmovdqu %xmm0, (%rsi) LONG $0x6479c1c4; BYTE $0xc3 // vpcmpgtb %xmm11, %xmm0, %xmm0 - LONG $0xc0dbf1c5 // vpand %xmm0, %xmm1, %xmm0 + LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 LONG $0xc0ebe9c5 // vpor %xmm0, %xmm2, %xmm0 - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - TESTW AX, AX - JNE LBB5_75 + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + TESTW CX, CX + JNE LBB5_22 ADDQ $16, R13 ADDQ $16, SI - LEAQ -16(R15), R11 - LEAQ -16(BX), R14 - CMPQ R15, $31 - SETGT DX - CMPQ R15, $32 - JL LBB5_78 - ADDQ $-16, CX - MOVQ R11, R15 - CMPQ BX, $31 - MOVQ R14, BX - JG LBB5_74 - JMP LBB5_78 + LEAQ -16(R10), CX + CMPQ AX, $31 + SETGT R15 + CMPQ AX, $32 + LEAQ -16(AX), AX + JL LBB5_31 + CMPQ R10, $31 + MOVQ CX, R10 + JG LBB5_16 + +LBB5_31: + TESTB R15, R15 + JE LBB5_36 -LBB5_14: - BSFL CX, R13 - ADDQ SI, R13 - MOVQ $12884901889, R15 - JMP LBB5_113 +LBB5_32: + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 + LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 + LONG $0xd074b1c5 // vpcmpeqb %xmm0, %xmm9, %xmm2 + LONG $0xd874a9c5 // vpcmpeqb %xmm0, %xmm10, %xmm3 + LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 + LONG $0x6479c1c4; BYTE $0xdb // vpcmpgtb %xmm11, %xmm0, %xmm3 + LONG $0xc9dbe1c5 // vpand %xmm1, %xmm3, %xmm1 + LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 + LONG $0xc1d7f9c5 // vpmovmskb %xmm1, %eax + ORL $65536, AX + BSFL AX, BX + LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + CMPQ CX, BX + MOVQ DX, R15 + JGE LBB5_53 + CMPQ CX, $8 + JB LBB5_56 + MOVQ AX, 0(SI) + LEAQ 8(R13), AX + ADDQ $8, SI + LEAQ -8(CX), BX + JMP LBB5_57 -LBB5_45: - CMPL R14, $16 - MOVQ $12884901889, R15 - JB LBB5_46 - MOVQ CX, 0(SI) +LBB5_19: + SUBQ R11, R13 + BSFL CX, R10 + ADDQ R13, R10 + JMP LBB5_69 + +LBB5_20: + CMPL CX, $16 + JB LBB5_42 + MOVQ BX, 0(SI) MOVQ AX, 8(SI) - LEAQ 16(R13), AX + LEAQ 16(R13), BX ADDQ $16, SI - LEAQ -16(R14), BX - CMPQ BX, $8 - JAE LBB5_49 - JMP LBB5_50 + LEAQ -16(CX), AX + CMPQ AX, $8 + JAE LBB5_43 + JMP LBB5_44 -LBB5_75: - MOVWLZX AX, AX - BSFL AX, R13 - SUBQ CX, R13 - JMP LBB5_84 +LBB5_22: + MOVWLZX CX, AX + SUBQ R11, R13 + BSFL AX, R10 + ADDQ R13, R10 + MOVQ DX, R15 + JMP LBB5_69 -LBB5_58: - MOVQ R13, AX - MOVQ BX, CX - MOVQ DX, R14 - CMPQ CX, $8 - JB LBB5_62 +LBB5_23: + MOVQ R13, CX + MOVQ R10, AX + CMPQ AX, $8 + JB LBB5_25 -LBB5_61: - MOVQ 0(AX), DX +LBB5_24: + MOVQ 0(CX), DX MOVQ DX, 0(SI) - ADDQ $8, AX + ADDQ $8, CX ADDQ $8, SI - ADDQ $-8, CX + ADDQ $-8, AX -LBB5_62: - CMPQ CX, $4 - JAE LBB5_63 - CMPQ CX, $2 - JAE LBB5_65 +LBB5_25: + CMPQ AX, $4 + JB LBB5_26 + MOVL 0(CX), DX + MOVL DX, 0(SI) + ADDQ $4, CX + ADDQ $4, SI + ADDQ $-4, AX + CMPQ AX, $2 + JAE LBB5_50 -LBB5_66: - TESTQ CX, CX - JE LBB5_68 +LBB5_27: + 
TESTQ AX, AX + JE LBB5_29 -LBB5_67: - MOVB 0(AX), AX +LBB5_28: + MOVB 0(CX), AX MOVB AX, 0(SI) -LBB5_68: - NOTQ BX - ADDQ R9, BX - SUBQ R13, BX - MOVQ BX, R13 - JMP LBB5_113 - -LBB5_70: - MOVQ BX, R14 - MOVQ R15, R11 - QUAD $0xfffffc50256ffec5 // vmovdqu $-944(%rip), %ymm4 /* LCPI5_0(%rip) */ - QUAD $0xfffffc682d6ffec5 // vmovdqu $-920(%rip), %ymm5 /* LCPI5_1(%rip) */ - QUAD $0xfffffc80356ffec5 // vmovdqu $-896(%rip), %ymm6 /* LCPI5_2(%rip) */ - LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - QUAD $0xfffffc933d6ffac5 // vmovdqu $-877(%rip), %xmm7 /* LCPI5_3(%rip) */ - QUAD $0xfffffc9b0d6f7ac5 // vmovdqu $-869(%rip), %xmm9 /* LCPI5_4(%rip) */ - QUAD $0xfffffca3156f7ac5 // vmovdqu $-861(%rip), %xmm10 /* LCPI5_5(%rip) */ - LONG $0x762141c4; BYTE $0xdb // vpcmpeqd %xmm11, %xmm11, %xmm11 +LBB5_29: + SUBQ R11, R10 + ADDQ R13, R10 + NOTQ R10 + JMP LBB5_69 -LBB5_78: - TESTB DX, DX - JE LBB5_79 +LBB5_26: + CMPQ AX, $2 + JB LBB5_27 -LBB5_85: - LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 - LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 - LONG $0xd074b1c5 // vpcmpeqb %xmm0, %xmm9, %xmm2 - LONG $0xd874a9c5 // vpcmpeqb %xmm0, %xmm10, %xmm3 - LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 - LONG $0x6479c1c4; BYTE $0xdb // vpcmpgtb %xmm11, %xmm0, %xmm3 - LONG $0xcbdbf1c5 // vpand %xmm3, %xmm1, %xmm1 - LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xc1d7f9c5 // vpmovmskb %xmm1, %eax - ORL $65536, AX - BSFL AX, AX - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - CMPQ R14, AX - JGE LBB5_86 - CMPQ R14, $8 - MOVQ -56(BP), R11 - MOVQ $12884901889, R15 - JB LBB5_97 - MOVQ CX, 0(SI) - LEAQ 8(R13), AX - ADDQ $8, SI - LEAQ -8(R14), CX - JMP LBB5_99 +LBB5_50: + MOVWLZX 0(CX), DX + MOVW DX, 0(SI) + ADDQ $2, CX + ADDQ $2, SI + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB5_28 + JMP LBB5_29 -LBB5_72: - MOVQ BX, R14 - MOVQ R15, R11 - QUAD $0xfffffb9f256ffec5 // vmovdqu $-1121(%rip), %ymm4 /* LCPI5_0(%rip) */ - QUAD $0xfffffbb72d6ffec5 // vmovdqu $-1097(%rip), %ymm5 /* LCPI5_1(%rip) */ - QUAD $0xfffffbcf356ffec5 // vmovdqu $-1073(%rip), %ymm6 /* LCPI5_2(%rip) */ +LBB5_30: + MOVQ R10, CX + QUAD $0xfffffc0e256ffec5 // vmovdqu $-1010(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xfffffc262d6ffec5 // vmovdqu $-986(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xfffffc3e356ffec5 // vmovdqu $-962(%rip), %ymm6 /* LCPI5_2(%rip) */ LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - TESTB DX, DX - JNE LBB5_85 + QUAD $0xfffffc513d6ffac5 // vmovdqu $-943(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffffc590d6f7ac5 // vmovdqu $-935(%rip), %xmm9 /* LCPI5_4(%rip) */ + QUAD $0xfffffc61156f7ac5 // vmovdqu $-927(%rip), %xmm10 /* LCPI5_5(%rip) */ + LONG $0x762141c4; BYTE $0xdb // vpcmpeqd %xmm11, %xmm11, %xmm11 + TESTB R15, R15 + JE LBB5_36 + JMP LBB5_32 + +LBB5_35: + MOVQ R10, CX + QUAD $0xfffffbc7256ffec5 // vmovdqu $-1081(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xfffffbdf2d6ffec5 // vmovdqu $-1057(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xfffffbf7356ffec5 // vmovdqu $-1033(%rip), %ymm6 /* LCPI5_2(%rip) */ + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + TESTB R15, R15 + JNE LBB5_32 -LBB5_79: - TESTQ R14, R14 - LONG $0x063d8d4c; WORD $0x00a5; BYTE $0x00 // leaq $42246(%rip), %r15 /* __SingleQuoteTab(%rip) */ - JLE LBB5_109 - TESTQ R11, R11 - JLE LBB5_109 - XORL CX, CX - XORL AX, AX +LBB5_36: + TESTQ CX, CX + MOVQ DX, R15 + LONG $0xf4158d4c; WORD $0x00a8; BYTE $0x00 // leaq $43252(%rip), %r10 /* __SingleQuoteTab(%rip) */ + JLE LBB5_41 + TESTQ AX, AX + JLE LBB5_41 -LBB5_82: - MOVBLZX 
0(R13)(CX*1), DX - MOVQ DX, BX - SHLQ $4, BX - CMPQ 0(BX)(R15*1), $0 - JNE LBB5_83 - LEAQ 0(R11)(AX*1), BX - MOVB DX, 0(SI)(CX*1) - LEAQ -1(AX), DX - CMPQ BX, $2 - JL LBB5_108 - ADDQ R14, AX - ADDQ $1, CX - CMPQ AX, $1 - MOVQ DX, AX - JG LBB5_82 +LBB5_38: + MOVBLZX 0(R13), BX + MOVQ BX, DX + SHLQ $4, DX + CMPQ 0(DX)(R10*1), $0 + JNE LBB5_55 + INCQ R13 + MOVB BX, 0(SI) + CMPQ AX, $2 + LEAQ -1(AX), AX + JL LBB5_41 + INCQ SI + CMPQ CX, $1 + LEAQ -1(CX), CX + JG LBB5_38 -LBB5_108: - SUBQ DX, R13 - ADDQ DX, R11 +LBB5_41: + SUBQ R11, R13 + NEGQ AX + SBBQ R10, R10 + XORQ R13, R10 + JMP LBB5_69 -LBB5_109: - TESTQ R11, R11 - MOVQ -48(BP), R14 - MOVQ $12884901889, R15 - JE LBB5_110 - NOTQ R13 - ADDQ R9, R13 - JMP LBB5_112 +LBB5_42: + MOVQ R13, BX + MOVQ CX, AX + CMPQ AX, $8 + JB LBB5_44 + +LBB5_43: + MOVQ 0(BX), DX + MOVQ DX, 0(SI) + ADDQ $8, BX + ADDQ $8, SI + ADDQ $-8, AX + +LBB5_44: + CMPQ AX, $4 + JB LBB5_45 + MOVL 0(BX), DX + MOVL DX, 0(SI) + ADDQ $4, BX + ADDQ $4, SI + ADDQ $-4, AX + CMPQ AX, $2 + JAE LBB5_52 LBB5_46: - MOVQ R13, AX - MOVQ R14, BX - CMPQ BX, $8 - JB LBB5_50 + TESTQ AX, AX + JE LBB5_48 -LBB5_49: - MOVQ 0(AX), CX - MOVQ CX, 0(SI) - ADDQ $8, AX - ADDQ $8, SI - ADDQ $-8, BX +LBB5_47: + MOVB 0(BX), AX + MOVB AX, 0(SI) -LBB5_50: - CMPQ BX, $4 - JAE LBB5_51 - CMPQ BX, $2 - JAE LBB5_53 +LBB5_48: + SUBQ R11, R13 + ADDQ CX, R13 + JMP LBB5_68 -LBB5_54: - TESTQ BX, BX - JE LBB5_56 +LBB5_45: + CMPQ AX, $2 + JB LBB5_46 + +LBB5_52: + MOVWLZX 0(BX), DX + MOVW DX, 0(SI) + ADDQ $2, BX + ADDQ $2, SI + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB5_47 + JMP LBB5_48 + +LBB5_53: + CMPL BX, $8 + JB LBB5_62 + MOVQ AX, 0(SI) + LEAQ 8(R13), R10 + ADDQ $8, SI + LEAQ -8(BX), AX + JMP LBB5_63 LBB5_55: - MOVB 0(AX), AX - MOVB AX, 0(SI) + SUBQ R11, R13 + JMP LBB5_68 LBB5_56: - SUBQ R9, R13 - ADDQ R14, R13 - MOVQ DX, R14 - JMP LBB5_113 + MOVQ R13, AX + MOVQ CX, BX -LBB5_63: +LBB5_57: + CMPQ BX, $4 + JB LBB5_58 MOVL 0(AX), DX MOVL DX, 0(SI) ADDQ $4, AX ADDQ $4, SI - ADDQ $-4, CX - CMPQ CX, $2 - JB LBB5_66 + ADDQ $-4, BX + CMPQ BX, $2 + JAE LBB5_83 -LBB5_65: - MOVWLZX 0(AX), DX - MOVW DX, 0(SI) - ADDQ $2, AX - ADDQ $2, SI - ADDQ $-2, CX - TESTQ CX, CX - JNE LBB5_67 - JMP LBB5_68 +LBB5_59: + TESTQ BX, BX + JE LBB5_61 -LBB5_51: - MOVL 0(AX), CX - MOVL CX, 0(SI) - ADDQ $4, AX - ADDQ $4, SI - ADDQ $-4, BX +LBB5_60: + MOVB 0(AX), AX + MOVB AX, 0(SI) + +LBB5_61: + SUBQ R11, CX + ADDQ R13, CX + NOTQ CX + MOVQ CX, R10 + JMP LBB5_69 + +LBB5_58: CMPQ BX, $2 - JB LBB5_54 + JB LBB5_59 -LBB5_53: - MOVWLZX 0(AX), CX - MOVW CX, 0(SI) +LBB5_83: + MOVWLZX 0(AX), DX + MOVW DX, 0(SI) ADDQ $2, AX ADDQ $2, SI ADDQ $-2, BX TESTQ BX, BX - JNE LBB5_55 - JMP LBB5_56 + JNE LBB5_60 + JMP LBB5_61 -LBB5_86: - CMPL AX, $8 - MOVQ -56(BP), R11 - MOVQ $12884901889, R15 - JB LBB5_87 - MOVQ CX, 0(SI) - LEAQ 8(R13), CX - ADDQ $8, SI - LEAQ -8(AX), BX - JMP LBB5_89 - -LBB5_110: - SUBQ R9, R13 - -LBB5_112: - MOVQ -56(BP), R11 - JMP LBB5_113 - -LBB5_83: - SUBQ R9, R13 - SUBQ AX, R13 - -LBB5_84: - MOVQ -56(BP), R11 - MOVQ -48(BP), R14 - MOVQ $12884901889, R15 - JMP LBB5_113 - -LBB5_97: - MOVQ R13, AX - MOVQ R14, CX +LBB5_62: + MOVQ R13, R10 + MOVQ BX, AX -LBB5_99: - CMPQ CX, $4 - JAE LBB5_100 - CMPQ CX, $2 - JAE LBB5_102 +LBB5_63: + CMPQ AX, $4 + JB LBB5_64 + MOVL 0(R10), CX + MOVL CX, 0(SI) + ADDQ $4, R10 + ADDQ $4, SI + ADDQ $-4, AX + CMPQ AX, $2 + JAE LBB5_85 -LBB5_103: - TESTQ CX, CX - JE LBB5_105 +LBB5_65: + TESTQ AX, AX + JE LBB5_67 -LBB5_104: - MOVB 0(AX), AX +LBB5_66: + MOVB 0(R10), AX MOVB AX, 0(SI) -LBB5_105: - NOTQ R14 - ADDQ R9, R14 - SUBQ R13, 
R14 - MOVQ R14, R13 - MOVQ -48(BP), R14 - JMP LBB5_113 - -LBB5_87: - MOVQ R13, CX - MOVQ AX, BX - -LBB5_89: - MOVQ -48(BP), R14 - CMPQ BX, $4 - JAE LBB5_90 - CMPQ BX, $2 - JAE LBB5_92 - -LBB5_93: - TESTQ BX, BX - JE LBB5_95 +LBB5_67: + SUBQ R11, R13 + ADDQ BX, R13 -LBB5_94: - MOVB 0(CX), CX - MOVB CX, 0(SI) +LBB5_68: + MOVQ R13, R10 -LBB5_95: - SUBQ R9, R13 - ADDQ AX, R13 +LBB5_69: + TESTQ R10, R10 + MOVQ -48(BP), DX + MOVQ $12884901889, R13 + JS LBB5_122 + ADDQ R10, R11 + ADDQ R10, R12 + CMPQ R14, R10 + JE LBB5_118 + SUBQ R10, R9 + SUBQ R14, R10 + JMP LBB5_73 -LBB5_113: - TESTQ R13, R13 - JS LBB5_114 - ADDQ R13, R12 - CMPQ R14, R13 - JE LBB5_129 - SUBQ R13, R10 - JMP LBB5_117 - -LBB5_127: +LBB5_72: + INCQ R11 ADDQ AX, R12 - ADDQ $1, R13 - CMPQ R14, R13 - JE LBB5_129 + INCQ R10 + JE LBB5_118 -LBB5_117: - MOVBLZX 0(R9)(R13*1), DX - SHLQ $4, DX - MOVQ 0(R8)(DX*1), SI - TESTL SI, SI - JE LBB5_128 - MOVLQSX SI, AX - SUBQ AX, R10 +LBB5_73: + MOVBLZX 0(R11), SI + SHLQ $4, SI + MOVQ 0(R8)(SI*1), BX + TESTL BX, BX + JE LBB5_81 + MOVLQSX BX, AX + SUBQ AX, R9 JL LBB5_119 - SHLQ $32, SI - LEAQ 0(R8)(DX*1), CX - ADDQ $8, CX - CMPQ SI, R15 - JL LBB5_121 + SHLQ $32, BX + LEAQ 8(R8)(SI*1), CX + CMPQ BX, R13 + JL LBB5_77 MOVL 0(CX), CX MOVL CX, 0(R12) - LEAQ 0(R8)(DX*1), CX - ADDQ $12, CX - LEAQ 4(R12), DX - LEAQ -4(AX), SI - CMPQ SI, $2 - JGE LBB5_124 - JMP LBB5_125 + LEAQ 12(R8)(SI*1), CX + LEAQ 4(R12), R14 + LEAQ -4(AX), BX + CMPQ BX, $2 + JGE LBB5_78 + JMP LBB5_79 -LBB5_121: - MOVQ R12, DX - MOVQ AX, SI - CMPQ SI, $2 - JL LBB5_125 +LBB5_77: + MOVQ R12, R14 + MOVQ AX, BX + CMPQ BX, $2 + JL LBB5_79 -LBB5_124: - MOVWLZX 0(CX), BX - MOVW BX, 0(DX) +LBB5_78: + MOVWLZX 0(CX), SI + MOVW SI, 0(R14) ADDQ $2, CX - ADDQ $2, DX - ADDQ $-2, SI + ADDQ $2, R14 + ADDQ $-2, BX -LBB5_125: - TESTQ SI, SI - JLE LBB5_127 +LBB5_79: + TESTQ BX, BX + JLE LBB5_72 MOVBLZX 0(CX), CX - MOVB CX, 0(DX) - JMP LBB5_127 + MOVB CX, 0(R14) + JMP LBB5_72 -LBB5_128: - LEAQ 0(R9)(R13*1), AX - SUBQ R13, R14 - JNE LBB5_10 - JMP LBB5_129 +LBB5_81: + MOVQ R10, R14 + NEGQ R14 + TESTQ R10, R10 + JNE LBB5_3 + JMP LBB5_118 -LBB5_100: - MOVL 0(AX), DX - MOVL DX, 0(SI) - ADDQ $4, AX - ADDQ $4, SI - ADDQ $-4, CX - CMPQ CX, $2 - JB LBB5_103 +LBB5_64: + CMPQ AX, $2 + JB LBB5_65 -LBB5_102: - MOVWLZX 0(AX), DX - MOVW DX, 0(SI) - ADDQ $2, AX +LBB5_85: + MOVWLZX 0(R10), CX + MOVW CX, 0(SI) + ADDQ $2, R10 ADDQ $2, SI - ADDQ $-2, CX - TESTQ CX, CX - JNE LBB5_104 - JMP LBB5_105 - -LBB5_90: - MOVL 0(CX), DX - MOVL DX, 0(SI) - ADDQ $4, CX - ADDQ $4, SI - ADDQ $-4, BX - CMPQ BX, $2 - JB LBB5_93 + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB5_66 + JMP LBB5_67 -LBB5_92: - MOVWLZX 0(CX), DX - MOVW DX, 0(SI) - ADDQ $2, CX - ADDQ $2, SI - ADDQ $-2, BX - TESTQ BX, BX - JNE LBB5_94 - JMP LBB5_95 - -LBB5_1: - LONG $0x160d8d4c; WORD $0x00c2; BYTE $0x00 // leaq $49686(%rip), %r9 /* __EscTab(%rip) */ - QUAD $0xfffff880156f7ec5 // vmovdqu $-1920(%rip), %ymm10 /* LCPI5_0(%rip) */ - QUAD $0xfffff8980d6f7ec5 // vmovdqu $-1896(%rip), %ymm9 /* LCPI5_1(%rip) */ - QUAD $0xfffff8b01d6f7ec5 // vmovdqu $-1872(%rip), %ymm11 /* LCPI5_2(%rip) */ +LBB5_86: + LONG $0x900d8d4c; WORD $0x00c6; BYTE $0x00 // leaq $50832(%rip), %r9 /* __EscTab(%rip) */ + QUAD $0xfffff930156f7ec5 // vmovdqu $-1744(%rip), %ymm10 /* LCPI5_0(%rip) */ + QUAD $0xfffff9480d6f7ec5 // vmovdqu $-1720(%rip), %ymm9 /* LCPI5_1(%rip) */ + QUAD $0xfffff9601d6f7ec5 // vmovdqu $-1696(%rip), %ymm11 /* LCPI5_2(%rip) */ LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 MOVQ DX, BX MOVQ R14, R10 -LBB5_2: +LBB5_87: CMPQ R10, 
$16 - JL LBB5_23 - CMPQ R10, $32 - JL LBB5_15 - LEAQ 32(R10), CX - XORL AX, AX - -LBB5_5: - LONG $0x046ffec5; BYTE $0x07 // vmovdqu (%rdi,%rax), %ymm0 - LONG $0xc864adc5 // vpcmpgtb %ymm0, %ymm10, %ymm1 - LONG $0xd074b5c5 // vpcmpeqb %ymm0, %ymm9, %ymm2 - LONG $0xd874a5c5 // vpcmpeqb %ymm0, %ymm11, %ymm3 - LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 - LONG $0x047ffec5; BYTE $0x03 // vmovdqu %ymm0, (%rbx,%rax) - LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 - LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 - LONG $0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 - LONG $0xf0d7fdc5 // vpmovmskb %ymm0, %esi - TESTL SI, SI - JNE LBB5_6 - ADDQ $32, AX - ADDQ $-32, CX - CMPQ CX, $63 - JG LBB5_5 - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - QUAD $0xfffff83a1d6f7ec5 // vmovdqu $-1990(%rip), %ymm11 /* LCPI5_2(%rip) */ - QUAD $0xfffff8120d6f7ec5 // vmovdqu $-2030(%rip), %ymm9 /* LCPI5_1(%rip) */ - QUAD $0xfffff7ea156f7ec5 // vmovdqu $-2070(%rip), %ymm10 /* LCPI5_0(%rip) */ - ADDQ AX, DI - SUBQ AX, R10 - ADDQ AX, BX - CMPQ CX, $48 - QUAD $0xfffff8353d6ffac5 // vmovdqu $-1995(%rip), %xmm7 /* LCPI5_3(%rip) */ - QUAD $0xfffff83d2d6ffac5 // vmovdqu $-1987(%rip), %xmm5 /* LCPI5_4(%rip) */ - QUAD $0xfffff845256ffac5 // vmovdqu $-1979(%rip), %xmm4 /* LCPI5_5(%rip) */ - LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 - JGE LBB5_18 + JGE LBB5_88 -LBB5_23: +LBB5_93: CMPQ R10, $8 - JL LBB5_33 + JL LBB5_97 -LBB5_24: +LBB5_94: MOVBLZX 0(DI), AX MOVBLZX 0(AX)(R9*1), AX MOVBLZX 1(DI), CX @@ -2198,7 +2130,7 @@ LBB5_24: MOVQ 0(DI), CX MOVQ CX, 0(BX) TESTB AX, AX - JNE LBB5_25 + JNE LBB5_115 MOVBLZX 4(DI), AX MOVBLZX 0(AX)(R9*1), AX MOVBLZX 5(DI), CX @@ -2214,14 +2146,14 @@ LBB5_24: ORL SI, AX ORL CX, AX TESTB AX, AX - JNE LBB5_31 + JNE LBB5_116 ADDQ $8, BX ADDQ $8, DI ADDQ $-8, R10 -LBB5_33: +LBB5_97: CMPQ R10, $4 - JL LBB5_36 + JL LBB5_100 MOVBLZX 0(DI), AX MOVBLZX 0(AX)(R9*1), AX MOVBLZX 1(DI), CX @@ -2239,44 +2171,82 @@ LBB5_33: MOVL 0(DI), CX MOVL CX, 0(BX) TESTB AX, AX - JNE LBB5_25 + JNE LBB5_115 ADDQ $4, BX ADDQ $4, DI ADDQ $-4, R10 -LBB5_36: +LBB5_100: TESTQ R10, R10 - JLE LBB5_39 + JLE LBB5_117 -LBB5_37: +LBB5_101: MOVBLZX 0(DI), AX CMPB 0(AX)(R9*1), $0 - JNE LBB5_27 - ADDQ $1, DI + JNE LBB5_112 + INCQ DI MOVB AX, 0(BX) - ADDQ $1, BX - LEAQ -1(R10), AX + INCQ BX CMPQ R10, $1 - MOVQ AX, R10 - JG LBB5_37 - JMP LBB5_39 + LEAQ -1(R10), R10 + JG LBB5_101 + JMP LBB5_117 + +LBB5_88: + CMPQ R10, $32 + JL LBB5_103 + LEAQ 32(R10), CX + XORL AX, AX + +LBB5_90: + LONG $0x046ffec5; BYTE $0x07 // vmovdqu (%rdi,%rax), %ymm0 + LONG $0xc864adc5 // vpcmpgtb %ymm0, %ymm10, %ymm1 + LONG $0xd074b5c5 // vpcmpeqb %ymm0, %ymm9, %ymm2 + LONG $0xd874a5c5 // vpcmpeqb %ymm0, %ymm11, %ymm3 + LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 + LONG $0x047ffec5; BYTE $0x03 // vmovdqu %ymm0, (%rbx,%rax) + LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 + LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 + LONG $0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 + LONG $0xf0d7fdc5 // vpmovmskb %ymm0, %esi + TESTL SI, SI + JNE LBB5_109 + ADDQ $32, AX + ADDQ $-32, CX + CMPQ CX, $63 + JG LBB5_90 + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + QUAD $0xfffff7de1d6f7ec5 // vmovdqu $-2082(%rip), %ymm11 /* LCPI5_2(%rip) */ + QUAD $0xfffff7b60d6f7ec5 // vmovdqu $-2122(%rip), %ymm9 /* LCPI5_1(%rip) */ + QUAD $0xfffff78e156f7ec5 // vmovdqu $-2162(%rip), %ymm10 /* LCPI5_0(%rip) */ + ADDQ AX, DI + SUBQ AX, R10 + ADDQ AX, BX + CMPQ CX, $48 + 
QUAD $0xfffff7d93d6ffac5 // vmovdqu $-2087(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffff7e12d6ffac5 // vmovdqu $-2079(%rip), %xmm5 /* LCPI5_4(%rip) */ + QUAD $0xfffff7e9256ffac5 // vmovdqu $-2071(%rip), %xmm4 /* LCPI5_5(%rip) */ + LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 + JGE LBB5_104 + JMP LBB5_93 -LBB5_15: +LBB5_103: WORD $0xf8c5; BYTE $0x77 // vzeroupper LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - QUAD $0xfffff6da1d6f7ec5 // vmovdqu $-2342(%rip), %ymm11 /* LCPI5_2(%rip) */ - QUAD $0xfffff6b20d6f7ec5 // vmovdqu $-2382(%rip), %ymm9 /* LCPI5_1(%rip) */ - QUAD $0xfffff68a156f7ec5 // vmovdqu $-2422(%rip), %ymm10 /* LCPI5_0(%rip) */ - QUAD $0xfffff6e23d6ffac5 // vmovdqu $-2334(%rip), %xmm7 /* LCPI5_3(%rip) */ - QUAD $0xfffff6ea2d6ffac5 // vmovdqu $-2326(%rip), %xmm5 /* LCPI5_4(%rip) */ - QUAD $0xfffff6f2256ffac5 // vmovdqu $-2318(%rip), %xmm4 /* LCPI5_5(%rip) */ + QUAD $0xfffff78e1d6f7ec5 // vmovdqu $-2162(%rip), %ymm11 /* LCPI5_2(%rip) */ + QUAD $0xfffff7660d6f7ec5 // vmovdqu $-2202(%rip), %ymm9 /* LCPI5_1(%rip) */ + QUAD $0xfffff73e156f7ec5 // vmovdqu $-2242(%rip), %ymm10 /* LCPI5_0(%rip) */ + QUAD $0xfffff7963d6ffac5 // vmovdqu $-2154(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffff79e2d6ffac5 // vmovdqu $-2146(%rip), %xmm5 /* LCPI5_4(%rip) */ + QUAD $0xfffff7a6256ffac5 // vmovdqu $-2138(%rip), %xmm4 /* LCPI5_5(%rip) */ LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 -LBB5_18: +LBB5_104: MOVL $16, CX XORL AX, AX -LBB5_19: +LBB5_105: LONG $0x046ffac5; BYTE $0x07 // vmovdqu (%rdi,%rax), %xmm0 LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 LONG $0xd574f9c5 // vpcmpeqb %xmm5, %xmm0, %xmm2 @@ -2284,46 +2254,45 @@ LBB5_19: LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 LONG $0x047ffac5; BYTE $0x03 // vmovdqu %xmm0, (%rbx,%rax) LONG $0xc664f9c5 // vpcmpgtb %xmm6, %xmm0, %xmm0 - LONG $0xc0dbf1c5 // vpand %xmm0, %xmm1, %xmm0 + LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 LONG $0xc0ebe9c5 // vpor %xmm0, %xmm2, %xmm0 LONG $0xf0d7f9c5 // vpmovmskb %xmm0, %esi TESTW SI, SI - JNE LBB5_20 + JNE LBB5_108 ADDQ $16, AX - LEAQ 0(R10)(CX*1), SI - ADDQ $-16, SI + LEAQ -16(R10)(CX*1), SI ADDQ $-16, CX CMPQ SI, $31 - JG LBB5_19 + JG LBB5_105 ADDQ AX, DI SUBQ AX, R10 ADDQ AX, BX CMPQ R10, $8 - JGE LBB5_24 - JMP LBB5_33 + JGE LBB5_94 + JMP LBB5_97 -LBB5_20: +LBB5_108: MOVWLZX SI, CX BSFL CX, CX - JMP LBB5_7 + JMP LBB5_110 -LBB5_6: +LBB5_109: BSFL SI, CX -LBB5_7: +LBB5_110: ADDQ CX, DI ADDQ AX, DI SUBQ CX, R10 SUBQ AX, R10 ADDQ CX, BX -LBB5_26: +LBB5_111: ADDQ AX, BX -LBB5_27: +LBB5_112: MOVB 0(DI), AX -LBB5_28: +LBB5_113: MOVQ BX, CX MOVBLZX AX, AX SHLQ $4, AX @@ -2332,57 +2301,51 @@ LBB5_28: MOVQ AX, 0(CX) ADDQ CX, BX CMPQ R10, $2 - JL LBB5_39 - ADDQ $-1, R10 + JL LBB5_117 + DECQ R10 MOVBLZX 1(DI), AX - ADDQ $1, DI + INCQ DI CMPB 0(AX)(R9*1), $0 - JNE LBB5_28 - JMP LBB5_2 + JNE LBB5_113 + JMP LBB5_87 -LBB5_25: +LBB5_115: BSFL AX, AX ADDQ AX, DI SUBQ AX, R10 - JMP LBB5_26 + JMP LBB5_111 -LBB5_31: +LBB5_116: BSFL AX, AX LEAQ 4(AX), CX - ADDQ AX, DI - ADDQ $4, DI + LEAQ 4(DI)(AX*1), DI SUBQ CX, R10 - ADDQ AX, BX - ADDQ $4, BX - JMP LBB5_27 + LEAQ 4(BX)(AX*1), BX + JMP LBB5_112 -LBB5_39: +LBB5_117: SUBQ DX, BX - MOVQ BX, 0(R11) - JMP LBB5_131 - -LBB5_129: - ADDQ R13, R9 - MOVQ -64(BP), DX + MOVQ BX, 0(R15) + JMP LBB5_121 -LBB5_130: +LBB5_118: SUBQ DX, R12 - MOVQ R12, 0(R11) - SUBQ DI, R9 - MOVQ R9, R14 - JMP LBB5_131 + MOVQ R12, 0(R15) + SUBQ DI, R11 + JMP LBB5_120 LBB5_119: - SUBQ -64(BP), R12 - MOVQ R12, 0(R11) - SUBQ R9, DI - NOTQ R13 - ADDQ DI, R13 - MOVQ R13, R14 + SUBQ DX, R12 + 
MOVQ R12, 0(R15) + NOTQ R11 + ADDQ DI, R11 + +LBB5_120: + MOVQ R11, R14 -LBB5_131: +LBB5_121: MOVQ R14, AX - ADDQ $24, SP + ADDQ $8, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -2392,16 +2355,15 @@ LBB5_131: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB5_114: - SUBQ -64(BP), R12 - MOVQ R13, AX - NOTQ AX - ADDQ R12, AX - MOVQ AX, 0(R11) - SUBQ R9, DI - ADDQ R13, DI - MOVQ DI, R14 - JMP LBB5_131 +LBB5_122: + SUBQ DX, R12 + NOTQ R10 + ADDQ R10, R12 + MOVQ R12, 0(R15) + SUBQ DI, R11 + ADDQ R10, R11 + NOTQ R11 + JMP LBB5_120 LCPI6_0: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' @@ -2420,717 +2382,693 @@ _unquote: BYTE $0x53 // pushq %rbx SUBQ $24, SP TESTQ SI, SI - JE LBB6_86 + JE LBB6_2 MOVQ CX, -48(BP) - MOVQ R8, AX - MOVQ R8, -64(BP) MOVL R8, R10 ANDL $1, R10 - QUAD $0xffffff9c0d6ffec5 // vmovdqu $-100(%rip), %ymm1 /* LCPI6_0(%rip) */ - QUAD $0xffffffb4156ffac5 // vmovdqu $-76(%rip), %xmm2 /* LCPI6_1(%rip) */ + QUAD $0xffffffa70d6ffec5 // vmovdqu $-89(%rip), %ymm1 /* LCPI6_0(%rip) */ + QUAD $0xffffffbf156ffac5 // vmovdqu $-65(%rip), %xmm2 /* LCPI6_1(%rip) */ MOVQ DI, R9 - MOVQ SI, R12 - MOVQ DX, R8 - JMP LBB6_17 + MOVQ SI, R13 + MOVQ DX, AX + JMP LBB6_8 LBB6_2: - CMPL R15, $2048 - JB LBB6_77 - MOVL R15, AX - ANDL $-2048, AX - CMPL AX, $55296 - JNE LBB6_64 - CMPL R15, $56319 - JA LBB6_15 - CMPQ R14, $6 - JL LBB6_15 - CMPB 4(R9)(BX*1), $92 - JNE LBB6_15 - CMPB 5(R9)(BX*1), $117 - JNE LBB6_15 - MOVL 6(R9)(BX*1), DX - MOVL DX, R11 - NOTL R11 - LEAL -808464432(DX), AX - ANDL $-2139062144, R11 - TESTL AX, R11 - JNE LBB6_102 - LEAL 421075225(DX), AX - ORL DX, AX - TESTL $-2139062144, AX - JNE LBB6_102 - MOVL DX, CX - ANDL $2139062143, CX - MOVL $-1061109568, AX - SUBL CX, AX - MOVL AX, -52(BP) - LEAL 1179010630(CX), AX - MOVL AX, -56(BP) - MOVL -52(BP), AX - ANDL R11, AX - TESTL AX, -56(BP) - JNE LBB6_102 - MOVL $-522133280, AX - SUBL CX, AX - ADDL $960051513, CX - ANDL AX, R11 - TESTL CX, R11 - JNE LBB6_102 - BSWAPL DX - MOVL DX, AX - SHRL $4, AX - NOTL AX - ANDL $16843009, AX - LEAL 0(AX)(AX*8), AX - ANDL $252645135, DX - ADDL AX, DX - MOVL DX, AX - SHRL $4, AX - ORL DX, AX - MOVL AX, CX - SHRL $8, CX - ANDL $65280, CX - MOVBLZX AX, R11 - ORL CX, R11 - ANDL $16515072, AX - CMPL AX, $14417920 - JE LBB6_16 - TESTB $2, -64(BP) - JE LBB6_117 - MOVQ R13, DX - ADDQ $-6, R14 - MOVW $-16401, 0(R8) - MOVB $-67, 2(R8) - ADDQ $3, R8 - ADDQ $6, BX - MOVL R11, R15 - CMPL R11, $127 - JA LBB6_2 - JMP LBB6_63 + XORL R13, R13 + MOVQ DX, AX -LBB6_15: - ADDQ BX, R9 - ADDQ $4, R9 - JMP LBB6_79 +LBB6_3: + ADDQ R13, AX + SUBQ DX, AX + +LBB6_4: + ADDQ $24, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + WORD $0xf8c5; BYTE $0x77 // vzeroupper + RET + +LBB6_5: + LEAQ 4(R9)(R12*1), R9 + MOVL R15, CX + SHRL $6, CX + ORB $-64, CX + MOVB CX, 0(AX) + ANDB $63, R15 + ORB $-128, R15 + MOVB R15, 1(AX) + ADDQ $2, AX -LBB6_16: - ADDQ BX, R9 - ADDQ $10, R9 - SUBQ BX, R12 - ADDQ $-10, R12 - JMP LBB6_85 +LBB6_6: + MOVQ R14, R13 -LBB6_17: +LBB6_7: + TESTQ R13, R13 + JE LBB6_101 + +LBB6_8: CMPB 0(R9), $92 - JNE LBB6_19 - XORL R14, R14 - JMP LBB6_36 + JNE LBB6_10 + XORL BX, BX + JMP LBB6_24 -LBB6_19: - MOVQ R12, R15 - MOVQ R8, AX +LBB6_10: + MOVQ R13, R12 + MOVQ AX, R15 MOVQ R9, R14 - CMPQ R12, $32 - JL LBB6_24 - XORL AX, AX - MOVQ R12, CX + CMPQ R13, $32 + JL LBB6_14 + MOVQ R9, R14 + MOVQ AX, R15 + MOVQ R13, R12 -LBB6_21: - 
LONG $0x6f7ec1c4; WORD $0x0104 // vmovdqu (%r9,%rax), %ymm0 - LONG $0x7f7ec1c4; WORD $0x0004 // vmovdqu %ymm0, (%r8,%rax) - LONG $0xc174fdc5 // vpcmpeqb %ymm1, %ymm0, %ymm0 - LONG $0xd8d7fdc5 // vpmovmskb %ymm0, %ebx +LBB6_12: + LONG $0x6f7ec1c4; BYTE $0x06 // vmovdqu (%r14), %ymm0 + LONG $0x7f7ec1c4; BYTE $0x07 // vmovdqu %ymm0, (%r15) + LONG $0xc174fdc5 // vpcmpeqb %ymm1, %ymm0, %ymm0 + LONG $0xd8d7fdc5 // vpmovmskb %ymm0, %ebx TESTL BX, BX - JNE LBB6_34 - LEAQ -32(CX), R15 - ADDQ $32, AX - CMPQ CX, $63 - MOVQ R15, CX - JG LBB6_21 - LEAQ 0(R9)(AX*1), R14 - ADDQ R8, AX - -LBB6_24: + JNE LBB6_22 + ADDQ $32, R14 + ADDQ $32, R15 + CMPQ R12, $63 + LEAQ -32(R12), R12 + JG LBB6_12 + +LBB6_14: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ R15, $16 - JL LBB6_28 - MOVQ R9, R13 - SUBQ R14, R13 - QUAD $0xfffffdc00d6ffec5 // vmovdqu $-576(%rip), %ymm1 /* LCPI6_0(%rip) */ - QUAD $0xfffffdd8156ffac5 // vmovdqu $-552(%rip), %xmm2 /* LCPI6_1(%rip) */ + QUAD $0xffffff0d156ffac5 // vmovdqu $-243(%rip), %xmm2 /* LCPI6_1(%rip) */ + CMPQ R12, $16 + JL LBB6_17 -LBB6_26: +LBB6_15: LONG $0x6f7ac1c4; BYTE $0x06 // vmovdqu (%r14), %xmm0 - LONG $0x007ffac5 // vmovdqu %xmm0, (%rax) + LONG $0x7f7ac1c4; BYTE $0x07 // vmovdqu %xmm0, (%r15) LONG $0xc274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - TESTW CX, CX - JNE LBB6_35 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + TESTW BX, BX + JNE LBB6_23 ADDQ $16, R14 - ADDQ $16, AX - LEAQ -16(R15), R11 - ADDQ $-16, R13 - CMPQ R15, $31 - MOVQ R11, R15 - JG LBB6_26 - JMP LBB6_29 - -LBB6_28: - MOVQ R15, R11 - QUAD $0xfffffd7c0d6ffec5 // vmovdqu $-644(%rip), %ymm1 /* LCPI6_0(%rip) */ - QUAD $0xfffffd94156ffac5 // vmovdqu $-620(%rip), %xmm2 /* LCPI6_1(%rip) */ + ADDQ $16, R15 + CMPQ R12, $31 + LEAQ -16(R12), R12 + JG LBB6_15 -LBB6_29: - TESTQ R11, R11 - JE LBB6_87 - XORL CX, CX +LBB6_17: + TESTQ R12, R12 + JE LBB6_3 + XORL BX, BX + QUAD $0xfffffeaa0d6ffec5 // vmovdqu $-342(%rip), %ymm1 /* LCPI6_0(%rip) */ -LBB6_31: - MOVBLZX 0(R14)(CX*1), BX - CMPB BX, $92 - JE LBB6_33 - MOVB BX, 0(AX)(CX*1) - ADDQ $1, CX - CMPQ R11, CX - JNE LBB6_31 - JMP LBB6_87 - -LBB6_33: - SUBQ R9, R14 - ADDQ CX, R14 - CMPQ R14, $-1 - JNE LBB6_36 - JMP LBB6_87 +LBB6_19: + MOVBLZX 0(R14)(BX*1), R11 + CMPB R11, $92 + JE LBB6_21 + MOVB R11, 0(R15)(BX*1) + INCQ BX + CMPQ R12, BX + JNE LBB6_19 + JMP LBB6_3 -LBB6_34: - MOVLQSX BX, CX - BSFQ CX, R14 - ADDQ AX, R14 - CMPQ R14, $-1 - JNE LBB6_36 - JMP LBB6_87 +LBB6_21: + ADDQ BX, R14 + SUBQ R9, R14 + MOVQ R14, BX + CMPQ BX, $-1 + JNE LBB6_24 + JMP LBB6_3 -LBB6_35: - MOVWLZX CX, AX - BSFQ AX, R14 - SUBQ R13, R14 - CMPQ R14, $-1 - JE LBB6_87 +LBB6_22: + MOVLQSX BX, BX + SUBQ R9, R14 + BSFQ BX, BX + ADDQ R14, BX + CMPQ BX, $-1 + JNE LBB6_24 + JMP LBB6_3 + +LBB6_23: + MOVWLZX BX, BX + SUBQ R9, R14 + BSFQ BX, BX + ADDQ R14, BX + QUAD $0xfffffe4d0d6ffec5 // vmovdqu $-435(%rip), %ymm1 /* LCPI6_0(%rip) */ + CMPQ BX, $-1 + JE LBB6_3 -LBB6_36: - LEAQ 2(R14), AX - SUBQ AX, R12 - JS LBB6_116 - ADDQ R14, R9 - ADDQ $2, R9 +LBB6_24: + LEAQ 2(BX), CX + SUBQ CX, R13 + JS LBB6_99 + LEAQ 2(R9)(BX*1), R9 TESTQ R10, R10 JNE LBB6_67 -LBB6_38: - ADDQ R14, R8 - MOVBLZX -1(R9), AX - LONG $0xa50d8d48; WORD $0x00bc; BYTE $0x00 // leaq $48293(%rip), %rcx /* __UnquoteTab(%rip) */ - MOVB 0(AX)(CX*1), CX - CMPB CX, $-1 - JE LBB6_41 - TESTB CX, CX - JE LBB6_99 - MOVB CX, 0(R8) - ADDQ $1, R8 - TESTQ R12, R12 - JNE LBB6_17 - JMP LBB6_98 +LBB6_26: + ADDQ BX, AX + MOVBLZX -1(R9), CX + LONG $0x6f1d8d48; WORD $0x00c2; BYTE $0x00 // leaq $49775(%rip), %rbx /* 
__UnquoteTab(%rip) */ + MOVB 0(CX)(BX*1), BX + CMPB BX, $-1 + JE LBB6_29 + TESTB BX, BX + JE LBB6_100 + MOVB BX, 0(AX) + INCQ AX + JMP LBB6_7 -LBB6_41: - CMPQ R12, $3 - JLE LBB6_116 - MOVL 0(R9), R11 - MOVL R11, CX - NOTL CX - LEAL -808464432(R11), AX - ANDL $-2139062144, CX - TESTL AX, CX - JNE LBB6_89 - LEAL 421075225(R11), AX - ORL R11, AX - TESTL $-2139062144, AX - JNE LBB6_89 - MOVL R11, BX +LBB6_29: + CMPQ R13, $3 + JLE LBB6_99 + MOVL 0(R9), R14 + MOVL R14, R15 + NOTL R15 + LEAL -808464432(R14), CX + ANDL $-2139062144, R15 + TESTL CX, R15 + JNE LBB6_90 + LEAL 421075225(R14), CX + ORL R14, CX + TESTL $-2139062144, CX + JNE LBB6_90 + MOVL R14, BX ANDL $2139062143, BX - MOVL $-1061109568, AX - SUBL BX, AX - MOVQ DX, R13 - LEAL 1179010630(BX), DX - ANDL CX, AX - TESTL DX, AX - JNE LBB6_89 - MOVL $-522133280, AX - SUBL BX, AX + MOVL $-1061109568, CX + SUBL BX, CX + LEAL 1179010630(BX), R11 + ANDL R15, CX + TESTL R11, CX + JNE LBB6_90 + MOVL $-522133280, CX + SUBL BX, CX ADDL $960051513, BX - ANDL AX, CX - TESTL BX, CX - JNE LBB6_89 - MOVQ R13, DX - BSWAPL R11 - MOVL R11, AX - SHRL $4, AX - NOTL AX - ANDL $16843009, AX - LEAL 0(AX)(AX*8), AX - ANDL $252645135, R11 - ADDL AX, R11 - MOVL R11, AX - SHRL $4, AX - ORL R11, AX - MOVBLZX AX, R15 - SHRL $8, AX - ANDL $65280, AX - ORL AX, R15 - LEAQ -4(R12), R14 + ANDL CX, R15 + TESTL BX, R15 + JNE LBB6_90 + BSWAPL R14 + MOVL R14, CX + SHRL $4, CX + NOTL CX + ANDL $16843009, CX + LEAL 0(CX)(CX*8), CX + ANDL $252645135, R14 + ADDL CX, R14 + MOVL R14, CX + SHRL $4, CX + ORL R14, CX + MOVBLZX CX, R15 + SHRL $8, CX + ANDL $65280, CX + ORL CX, R15 + LEAQ -4(R13), R14 CMPL R15, $128 JB LBB6_75 - XORL BX, BX + XORL R12, R12 TESTQ R10, R10 - JE LBB6_2 + JE LBB6_51 -LBB6_48: +LBB6_36: CMPL R15, $2048 - JB LBB6_77 - MOVL R15, AX - ANDL $-2048, AX - CMPL AX, $55296 - JNE LBB6_64 + JB LBB6_5 + MOVL R15, CX + ANDL $-2048, CX + CMPL CX, $55296 + JNE LBB6_65 TESTQ R14, R14 - JLE LBB6_114 - CMPB 4(R9)(BX*1), $92 - JNE LBB6_82 + JLE LBB6_80 + CMPB 4(R9)(R12*1), $92 + JNE LBB6_81 CMPL R15, $56319 JA LBB6_78 CMPQ R14, $7 JL LBB6_78 - CMPB 5(R9)(BX*1), $92 + CMPB 5(R9)(R12*1), $92 JNE LBB6_78 - CMPB 6(R9)(BX*1), $117 + CMPB 6(R9)(R12*1), $117 JNE LBB6_78 - MOVL 7(R9)(BX*1), DX - MOVL DX, R11 - NOTL R11 - LEAL -808464432(DX), AX - ANDL $-2139062144, R11 - TESTL AX, R11 + MOVL 7(R9)(R12*1), R11 + MOVL R11, BX + NOTL BX + LEAL -808464432(R11), CX + ANDL $-2139062144, BX + MOVL BX, -52(BP) + TESTL CX, BX + JNE LBB6_104 + LEAL 421075225(R11), CX + ORL R11, CX + TESTL $-2139062144, CX + JNE LBB6_104 + MOVL R11, BX + ANDL $2139062143, BX + MOVL $-1061109568, CX + SUBL BX, CX + MOVQ R11, -64(BP) + LEAL 1179010630(BX), R11 + ANDL -52(BP), CX + TESTL R11, CX + MOVQ -64(BP), R11 + JNE LBB6_104 + MOVL $-522133280, CX + SUBL BX, CX + MOVL CX, -56(BP) + ADDL $960051513, BX + MOVL -52(BP), CX + ANDL -56(BP), CX + TESTL BX, CX + JNE LBB6_104 + BSWAPL R11 + MOVL R11, CX + SHRL $4, CX + NOTL CX + ANDL $16843009, CX + LEAL 0(CX)(CX*8), CX + ANDL $252645135, R11 + ADDL CX, R11 + MOVL R11, CX + SHRL $4, CX + ORL R11, CX + MOVL CX, BX + SHRL $8, BX + ANDL $65280, BX + MOVBLZX CX, R11 + ORL BX, R11 + ANDL $16515072, CX + CMPL CX, $14417920 + JE LBB6_85 + TESTB $2, R8 + JE LBB6_119 + ADDQ $-7, R14 + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + ADDQ $3, AX + ADDQ $7, R12 + MOVL R11, R15 + CMPL R11, $127 + JA LBB6_36 + JMP LBB6_64 + +LBB6_51: + CMPL R15, $2048 + JB LBB6_5 + MOVL R15, CX + ANDL $-2048, CX + CMPL CX, $55296 + JNE LBB6_65 + CMPL R15, $56319 + JA LBB6_77 + CMPQ R14, $6 + JL LBB6_77 + 
CMPB 4(R9)(R12*1), $92 + JNE LBB6_77 + CMPB 5(R9)(R12*1), $117 + JNE LBB6_77 + MOVL 6(R9)(R12*1), R11 + MOVL R11, BX + NOTL BX + LEAL -808464432(R11), CX + ANDL $-2139062144, BX + MOVL BX, -52(BP) + TESTL CX, BX JNE LBB6_103 - LEAL 421075225(DX), AX - ORL DX, AX - TESTL $-2139062144, AX + LEAL 421075225(R11), CX + ORL R11, CX + TESTL $-2139062144, CX JNE LBB6_103 - MOVL DX, CX - ANDL $2139062143, CX - MOVL $-1061109568, AX - SUBL CX, AX - MOVL AX, -52(BP) - LEAL 1179010630(CX), AX - MOVL AX, -56(BP) - MOVL -52(BP), AX - ANDL R11, AX - TESTL AX, -56(BP) + MOVL R11, BX + ANDL $2139062143, BX + MOVL $-1061109568, CX + SUBL BX, CX + MOVQ R11, -64(BP) + LEAL 1179010630(BX), R11 + ANDL -52(BP), CX + TESTL R11, CX + MOVQ -64(BP), R11 JNE LBB6_103 - MOVL $-522133280, AX - SUBL CX, AX - ADDL $960051513, CX - ANDL AX, R11 - TESTL CX, R11 + MOVL $-522133280, CX + SUBL BX, CX + MOVL CX, -56(BP) + ADDL $960051513, BX + MOVL -52(BP), CX + ANDL -56(BP), CX + TESTL BX, CX JNE LBB6_103 - BSWAPL DX - MOVL DX, AX - SHRL $4, AX - NOTL AX - ANDL $16843009, AX - LEAL 0(AX)(AX*8), AX - ANDL $252645135, DX - ADDL AX, DX - MOVL DX, AX - SHRL $4, AX - ORL DX, AX - MOVL AX, CX - SHRL $8, CX - ANDL $65280, CX - MOVBLZX AX, R11 - ORL CX, R11 - ANDL $16515072, AX - CMPL AX, $14417920 + BSWAPL R11 + MOVL R11, CX + SHRL $4, CX + NOTL CX + ANDL $16843009, CX + LEAL 0(CX)(CX*8), CX + ANDL $252645135, R11 + ADDL CX, R11 + MOVL R11, CX + SHRL $4, CX + ORL R11, CX + MOVL CX, BX + SHRL $8, BX + ANDL $65280, BX + MOVBLZX CX, R11 + ORL BX, R11 + ANDL $16515072, CX + CMPL CX, $14417920 JE LBB6_84 - TESTB $2, -64(BP) + TESTB $2, R8 JE LBB6_118 - MOVQ R13, DX - ADDQ $-7, R14 - MOVW $-16401, 0(R8) - MOVB $-67, 2(R8) - ADDQ $3, R8 - ADDQ $7, BX + ADDQ $-6, R14 + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + ADDQ $3, AX + ADDQ $6, R12 MOVL R11, R15 CMPL R11, $128 - JAE LBB6_48 + JAE LBB6_51 -LBB6_63: - ADDQ BX, R9 - ADDQ $4, R9 +LBB6_64: + LEAQ 4(R9)(R12*1), R9 MOVL R11, R15 JMP LBB6_76 -LBB6_64: - ADDQ BX, R9 - ADDQ $4, R9 - MOVL R15, AX - SHRL $12, AX - ORB $-32, AX - MOVB AX, 0(R8) - MOVL R15, AX - SHRL $6, AX - ANDB $63, AX - ORB $-128, AX - MOVB AX, 1(R8) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 2(R8) - ADDQ $3, R8 - MOVQ R14, R12 - TESTQ R12, R12 - JNE LBB6_17 - JMP LBB6_98 +LBB6_65: + LEAQ 4(R9)(R12*1), R9 + MOVL R15, CX + SHRL $12, CX + ORB $-32, CX + MOVB CX, 0(AX) + MOVL R15, CX + SHRL $6, CX + ANDB $63, CX + ORB $-128, CX + MOVB CX, 1(AX) + ANDB $63, R15 + ORB $-128, R15 + MOVB R15, 2(AX) + +LBB6_66: + ADDQ $3, AX + JMP LBB6_6 LBB6_67: - TESTL R12, R12 - JE LBB6_116 + TESTL R13, R13 + JE LBB6_99 CMPB -1(R9), $92 - JNE LBB6_100 + JNE LBB6_102 CMPB 0(R9), $92 JNE LBB6_74 - CMPL R12, $1 - JLE LBB6_116 - MOVB 1(R9), AX - CMPB AX, $34 + CMPL R13, $1 + JLE LBB6_99 + MOVB 1(R9), R11 + CMPB R11, $34 JE LBB6_73 - CMPB AX, $92 - JNE LBB6_113 + CMPB R11, $92 + JNE LBB6_114 LBB6_73: - ADDQ $1, R9 - ADDQ $-1, R12 + INCQ R9 + DECQ R13 LBB6_74: - ADDQ $1, R9 - ADDQ $-1, R12 - JMP LBB6_38 + INCQ R9 + DECQ R13 + JMP LBB6_26 LBB6_75: ADDQ $4, R9 LBB6_76: - MOVB R15, 0(R8) - ADDQ $1, R8 - MOVQ R14, R12 - TESTQ R12, R12 - JNE LBB6_17 - JMP LBB6_98 + MOVB R15, 0(AX) + INCQ AX + JMP LBB6_6 LBB6_77: - ADDQ BX, R9 - ADDQ $4, R9 - MOVL R15, AX - SHRL $6, AX - ORB $-64, AX - MOVB AX, 0(R8) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 1(R8) - ADDQ $2, R8 - MOVQ R14, R12 - TESTQ R12, R12 - JNE LBB6_17 - JMP LBB6_98 + LEAQ 4(R9)(R12*1), R9 + JMP LBB6_79 LBB6_78: - ADDQ BX, R9 - ADDQ $5, R9 - SUBQ BX, R12 - ADDQ $-5, R12 - MOVQ R12, R14 + LEAQ 
5(R9)(R12*1), R9 + SUBQ R12, R13 + ADDQ $-5, R13 + MOVQ R13, R14 LBB6_79: - TESTB $2, -64(BP) - JE LBB6_120 + TESTB $2, R8 + JNE LBB6_83 + JMP LBB6_121 LBB6_80: - MOVW $-16401, 0(R8) - MOVB $-67, 2(R8) - ADDQ $3, R8 - MOVQ R14, R12 - MOVQ R13, DX - TESTQ R12, R12 - JNE LBB6_17 - JMP LBB6_98 + TESTB $2, R8 + JNE LBB6_82 + JMP LBB6_99 -LBB6_82: - TESTB $2, -64(BP) +LBB6_81: + TESTB $2, R8 JE LBB6_122 - ADDQ BX, R9 - ADDQ $4, R9 - JMP LBB6_80 + +LBB6_82: + LEAQ 4(R9)(R12*1), R9 + +LBB6_83: + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + JMP LBB6_66 LBB6_84: - ADDQ BX, R9 - ADDQ $11, R9 - SUBQ BX, R12 - ADDQ $-11, R12 + LEAQ 10(R9)(R12*1), R9 + SUBQ R12, R13 + ADDQ $-10, R13 + JMP LBB6_86 LBB6_85: - SHLL $10, R15 - MOVL R11, AX - ADDL R15, AX - LEAL 0(R11)(R15*1), CX - ADDL $-56613888, CX - MOVL CX, DX - SHRL $18, DX - ORB $-16, DX - MOVB DX, 0(R8) - MOVL CX, DX - SHRL $12, DX - ANDB $63, DX - ORB $-128, DX - MOVB DX, 1(R8) - SHRL $6, CX - ANDB $63, CX - ORB $-128, CX - MOVB CX, 2(R8) - ANDB $63, AX - ORB $-128, AX - MOVB AX, 3(R8) - ADDQ $4, R8 - MOVQ R13, DX - TESTQ R12, R12 - JNE LBB6_17 - -LBB6_98: - XORL R12, R12 - JMP LBB6_87 + LEAQ 11(R9)(R12*1), R9 + SUBQ R12, R13 + ADDQ $-11, R13 LBB6_86: - XORL R12, R12 - MOVQ DX, R8 - -LBB6_87: - ADDQ R12, R8 - SUBQ DX, R8 - -LBB6_88: - MOVQ R8, AX - ADDQ $24, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - WORD $0xf8c5; BYTE $0x77 // vzeroupper - RET + SHLL $10, R15 + LEAL -56613888(R15)(R11*1), BX + CMPL BX, $1114112 + JB LBB6_89 + TESTB $2, R8 + JE LBB6_116 + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + ADDQ $3, AX + JMP LBB6_7 LBB6_89: - MOVQ R9, AX - SUBQ DI, AX - MOVQ -48(BP), SI - MOVQ AX, 0(SI) - MOVB 0(R9), CX - LEAL -48(CX), DX - MOVQ $-2, R8 - CMPB DX, $10 - JB LBB6_91 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_91: - LEAQ 1(AX), CX - MOVQ CX, 0(SI) - MOVB 1(R9), CX - LEAL -48(CX), DX - CMPB DX, $10 - JB LBB6_93 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_93: - LEAQ 2(AX), CX - MOVQ CX, 0(SI) - MOVB 2(R9), CX - LEAL -48(CX), DX - CMPB DX, $10 - JB LBB6_95 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_95: - LEAQ 3(AX), CX - MOVQ CX, 0(SI) - MOVB 3(R9), CX - LEAL -48(CX), DX - CMPB DX, $10 - JB LBB6_97 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_97: + MOVL BX, CX + SHRL $18, CX + ORB $-16, CX + MOVB CX, 0(AX) + MOVL BX, CX + SHRL $12, CX + ANDB $63, CX + ORB $-128, CX + MOVB CX, 1(AX) + MOVL BX, CX + SHRL $6, CX + ANDB $63, CX + ORB $-128, CX + MOVB CX, 2(AX) + ANDB $63, BX + ORB $-128, BX + MOVB BX, 3(AX) ADDQ $4, AX - MOVQ AX, 0(SI) - JMP LBB6_88 + JMP LBB6_7 + +LBB6_90: + MOVQ R9, DX + SUBQ DI, DX + MOVQ -48(BP), DI + MOVQ DX, 0(DI) + MOVB 0(R9), SI + LEAL -48(SI), CX + MOVQ $-2, AX + CMPB CX, $10 + JB LBB6_92 + ANDB $-33, SI + ADDB $-65, SI + CMPB SI, $5 + JA LBB6_4 + +LBB6_92: + LEAQ 1(DX), CX + MOVQ CX, 0(DI) + MOVB 1(R9), SI + LEAL -48(SI), CX + CMPB CX, $10 + JB LBB6_94 + ANDB $-33, SI + ADDB $-65, SI + CMPB SI, $5 + JA LBB6_4 + +LBB6_94: + LEAQ 2(DX), CX + MOVQ CX, 0(DI) + MOVB 2(R9), SI + LEAL -48(SI), CX + CMPB CX, $10 + JB LBB6_96 + ANDB $-33, SI + ADDB $-65, SI + CMPB SI, $5 + JA LBB6_4 + +LBB6_96: + LEAQ 3(DX), CX + MOVQ CX, 0(DI) + MOVB 3(R9), SI + LEAL -48(SI), CX + CMPB CX, $10 + JB LBB6_98 + ANDB $-33, SI + ADDB $-65, SI + CMPB SI, $5 + JA LBB6_4 + +LBB6_98: + ADDQ $4, DX + MOVQ DX, 0(DI) + JMP LBB6_4 LBB6_99: - NOTQ DI - 
ADDQ DI, R9 MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-3, R8 - JMP LBB6_88 + MOVQ SI, 0(AX) + MOVQ $-1, AX + JMP LBB6_4 LBB6_100: NOTQ DI ADDQ DI, R9 - -LBB6_101: MOVQ -48(BP), AX MOVQ R9, 0(AX) - MOVQ $-2, R8 - JMP LBB6_88 + MOVQ $-3, AX + JMP LBB6_4 + +LBB6_101: + XORL R13, R13 + JMP LBB6_3 LBB6_102: - LEAQ 0(R9)(BX*1), SI - ADDQ $4, SI - JMP LBB6_104 + NOTQ DI + ADDQ DI, R9 + JMP LBB6_115 LBB6_103: - LEAQ 0(R9)(BX*1), SI - ADDQ $5, SI + LEAQ 4(R9)(R12*1), SI + JMP LBB6_105 LBB6_104: - MOVQ SI, AX - SUBQ DI, AX - ADDQ $2, AX - MOVQ -48(BP), CX - MOVQ AX, 0(CX) - MOVB 2(SI), CX - LEAL -48(CX), DX - MOVQ $-2, R8 - CMPB DX, $10 - JB LBB6_106 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_106: - LEAQ 1(AX), CX - MOVQ -48(BP), DX - MOVQ CX, 0(DX) - MOVB 3(SI), CX - LEAL -48(CX), DX - CMPB DX, $10 - JB LBB6_108 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_108: - LEAQ 2(AX), CX - MOVQ -48(BP), DX - MOVQ CX, 0(DX) - MOVB 4(SI), CX - LEAL -48(CX), DX - CMPB DX, $10 - JB LBB6_110 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 - -LBB6_110: - LEAQ 3(AX), CX - MOVQ -48(BP), DX - MOVQ CX, 0(DX) - MOVB 5(SI), CX - LEAL -48(CX), DX - CMPB DX, $10 - JB LBB6_112 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_88 + LEAQ 5(R9)(R12*1), SI -LBB6_112: - ADDQ $4, AX - MOVQ -48(BP), CX - MOVQ AX, 0(CX) - JMP LBB6_88 +LBB6_105: + MOVQ SI, DX + SUBQ DI, DX + ADDQ $2, DX + MOVQ -48(BP), AX + MOVQ DX, 0(AX) + MOVB 2(SI), DI + LEAL -48(DI), CX + MOVQ $-2, AX + CMPB CX, $10 + JB LBB6_107 + ANDB $-33, DI + ADDB $-65, DI + CMPB DI, $5 + JA LBB6_4 + +LBB6_107: + LEAQ 1(DX), CX + MOVQ -48(BP), DI + MOVQ CX, 0(DI) + MOVB 3(SI), DI + LEAL -48(DI), CX + CMPB CX, $10 + JB LBB6_109 + ANDB $-33, DI + ADDB $-65, DI + CMPB DI, $5 + JA LBB6_4 + +LBB6_109: + LEAQ 2(DX), CX + MOVQ -48(BP), DI + MOVQ CX, 0(DI) + MOVB 4(SI), DI + LEAL -48(DI), CX + CMPB CX, $10 + JB LBB6_111 + ANDB $-33, DI + ADDB $-65, DI + CMPB DI, $5 + JA LBB6_4 + +LBB6_111: + LEAQ 3(DX), CX + MOVQ -48(BP), DI + MOVQ CX, 0(DI) + MOVB 5(SI), SI + LEAL -48(SI), CX + CMPB CX, $10 + JB LBB6_113 + ANDB $-33, SI + ADDB $-65, SI + CMPB SI, $5 + JA LBB6_4 LBB6_113: - SUBQ DI, R9 - ADDQ $1, R9 - JMP LBB6_101 + ADDQ $4, DX + MOVQ -48(BP), CX + MOVQ DX, 0(CX) + JMP LBB6_4 LBB6_114: - TESTB $2, -64(BP) - JE LBB6_116 - MOVW $-16401, 0(R8) - MOVB $-67, 2(R8) - ADDQ $3, R8 - XORL R12, R12 - MOVQ R13, DX - JMP LBB6_87 + SUBQ DI, R9 + INCQ R9 -LBB6_116: +LBB6_115: MOVQ -48(BP), AX - MOVQ SI, 0(AX) - MOVQ $-1, R8 - JMP LBB6_88 + MOVQ R9, 0(AX) + MOVQ $-2, AX + JMP LBB6_4 + +LBB6_116: + SUBQ DI, R9 + ADDQ $-4, R9 LBB6_117: - LEAQ 0(R9)(BX*1), AX - ADDQ $10, AX - JMP LBB6_119 + MOVQ -48(BP), AX + MOVQ R9, 0(AX) + MOVQ $-4, AX + JMP LBB6_4 LBB6_118: - LEAQ 0(R9)(BX*1), AX - ADDQ $11, AX + LEAQ 10(R9)(R12*1), AX + JMP LBB6_120 LBB6_119: + LEAQ 11(R9)(R12*1), AX + +LBB6_120: SUBQ DI, AX ADDQ $-4, AX MOVQ -48(BP), CX MOVQ AX, 0(CX) - MOVQ $-4, R8 - JMP LBB6_88 - -LBB6_120: - ADDQ R10, DI - SUBQ DI, R9 - ADDQ $-4, R9 + MOVQ $-4, AX + JMP LBB6_4 LBB6_121: - MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-4, R8 - JMP LBB6_88 + LEAQ 4(R10)(DI*1), AX + SUBQ AX, R9 + JMP LBB6_117 LBB6_122: + ADDQ R12, R9 SUBQ DI, R9 - ADDQ BX, R9 - JMP LBB6_121 + JMP LBB6_117 LCPI7_0: QUAD $0x2626262626262626; QUAD $0x2626262626262626 // .space 16, '&&&&&&&&&&&&&&&&' @@ -3141,12 +3079,12 @@ LCPI7_1: QUAD $0xe2e2e2e2e2e2e2e2; QUAD $0xe2e2e2e2e2e2e2e2 // .space 16, '\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2' LCPI7_2: - QUAD 
$0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' - QUAD $0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' + QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' + QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' LCPI7_3: - QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' - QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' + QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' + QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' LCPI7_4: QUAD $0x2626262626262626; QUAD $0x2626262626262626 // .space 16, '&&&&&&&&&&&&&&&&' @@ -3155,10 +3093,10 @@ LCPI7_5: QUAD $0xe2e2e2e2e2e2e2e2; QUAD $0xe2e2e2e2e2e2e2e2 // .space 16, '\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2' LCPI7_6: - QUAD $0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' + QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' LCPI7_7: - QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' + QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' _html_escape: BYTE $0x55 // pushq %rbp @@ -3170,597 +3108,591 @@ _html_escape: BYTE $0x53 // pushq %rbx SUBQ $24, SP MOVQ CX, -64(BP) + MOVQ DX, R15 MOVQ DX, -56(BP) - MOVQ DX, R8 MOVQ DI, -48(BP) MOVQ DI, AX TESTQ SI, SI - JLE LBB7_105 + JLE LBB7_94 + MOVQ SI, R10 MOVQ -64(BP), AX MOVQ 0(AX), R9 - QUAD $0xffffff051d6ffec5 // vmovdqu $-251(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xffffff1d256ffec5 // vmovdqu $-227(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xffffff352d6ffec5 // vmovdqu $-203(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xffffff4d356ffec5 // vmovdqu $-179(%rip), %ymm6 /* LCPI7_3(%rip) */ - LONG $0x743d8d4c; WORD $0x00b6; BYTE $0x00 // leaq $46708(%rip), %r15 /* __HtmlQuoteTab(%rip) */ - MOVQ -48(BP), R11 - MOVQ -56(BP), R8 + QUAD $0xffffff021d6ffec5 // vmovdqu $-254(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xffffff1a256ffec5 // vmovdqu $-230(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xffffff322d6ffec5 // vmovdqu $-206(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xffffff4a356ffec5 // vmovdqu $-182(%rip), %ymm6 /* LCPI7_3(%rip) */ + LONG $0x32358d4c; WORD $0x00bb; BYTE $0x00 // leaq $47922(%rip), %r14 /* __HtmlQuoteTab(%rip) */ + MOVQ $12884901889, DI + MOVQ -48(BP), R12 + MOVQ -56(BP), R15 LBB7_2: TESTQ R9, R9 - JLE LBB7_3 - CMPQ SI, $31 - SETGT CX + JLE LBB7_96 + CMPQ R10, $31 + SETGT BX MOVQ R9, AX - MOVQ R8, BX - MOVQ SI, R10 - MOVQ R11, R12 + MOVQ R15, R8 + MOVQ R10, SI + MOVQ R12, R13 CMPQ R9, $32 - JL LBB7_12 - CMPQ SI, $32 - JL LBB7_12 - XORL BX, BX - MOVQ SI, DX - MOVQ R9, DI + JL LBB7_9 + CMPQ R10, $32 + JL LBB7_9 + MOVQ R12, R13 + MOVQ R10, SI + MOVQ R15, R8 + MOVQ R9, DX -LBB7_7: - LONG $0x6f7ec1c4; WORD $0x1b04 // vmovdqu (%r11,%rbx), %ymm0 +LBB7_6: + LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 LONG $0xcb74fdc5 // vpcmpeqb %ymm3, %ymm0, %ymm1 LONG $0xd474fdc5 // vpcmpeqb %ymm4, %ymm0, %ymm2 LONG $0xc9ebedc5 // vpor %ymm1, %ymm2, %ymm1 - LONG $0xd5dbfdc5 // vpand %ymm5, %ymm0, %ymm2 + LONG 
$0xd5ebfdc5 // vpor %ymm5, %ymm0, %ymm2 LONG $0xd674edc5 // vpcmpeqb %ymm6, %ymm2, %ymm2 LONG $0xcaebf5c5 // vpor %ymm2, %ymm1, %ymm1 - LONG $0x7f7ec1c4; WORD $0x1804 // vmovdqu %ymm0, (%r8,%rbx) + LONG $0x7f7ec1c4; BYTE $0x00 // vmovdqu %ymm0, (%r8) LONG $0xc1d7fdc5 // vpmovmskb %ymm1, %eax TESTL AX, AX - JNE LBB7_8 - LEAQ -32(DX), R10 - LEAQ -32(DI), AX + JNE LBB7_19 + ADDQ $32, R13 + ADDQ $32, R8 + LEAQ -32(DX), AX + CMPQ SI, $63 + SETGT BX + CMPQ SI, $64 + LEAQ -32(SI), SI + JL LBB7_9 CMPQ DX, $63 - SETGT CX - ADDQ $32, BX - CMPQ DX, $64 - JL LBB7_11 - MOVQ R10, DX - CMPQ DI, $63 - MOVQ AX, DI - JG LBB7_7 - -LBB7_11: - LEAQ 0(R11)(BX*1), R12 - ADDQ R8, BX + MOVQ AX, DX + JG LBB7_6 -LBB7_12: - TESTB CX, CX - JE LBB7_38 - LONG $0x6f7ec1c4; WORD $0x2404 // vmovdqu (%r12), %ymm0 +LBB7_9: + TESTB BX, BX + JE LBB7_13 + LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 LONG $0xcb74fdc5 // vpcmpeqb %ymm3, %ymm0, %ymm1 LONG $0xd474fdc5 // vpcmpeqb %ymm4, %ymm0, %ymm2 LONG $0xc9ebedc5 // vpor %ymm1, %ymm2, %ymm1 - LONG $0xc5dbfdc5 // vpand %ymm5, %ymm0, %ymm0 + LONG $0xc5ebfdc5 // vpor %ymm5, %ymm0, %ymm0 LONG $0xc674fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm0 LONG $0xc0ebf5c5 // vpor %ymm0, %ymm1, %ymm0 LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx MOVQ $4294967296, DX ORQ DX, CX - BSFQ CX, R13 - LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 + BSFQ CX, R11 + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0x16f9e3c4; WORD $0x01c1 // vpextrq $1, %xmm0, %rcx LONG $0x7ef9e1c4; BYTE $0xc2 // vmovq %xmm0, %rdx - CMPQ R13, AX - JLE LBB7_14 + CMPQ R11, AX + JLE LBB7_20 CMPQ AX, $16 - JB LBB7_27 - MOVQ DX, 0(BX) - MOVQ CX, 8(BX) - LEAQ 16(R12), R10 - ADDQ $16, BX - LEAQ -16(AX), R14 - CMPQ R14, $8 - JAE LBB7_30 - JMP LBB7_31 - -LBB7_38: + JB LBB7_23 + MOVQ DX, 0(R8) + MOVQ CX, 8(R8) + LEAQ 16(R13), R11 + ADDQ $16, R8 + LEAQ -16(AX), SI + CMPQ SI, $8 + JAE LBB7_24 + JMP LBB7_25 + +LBB7_13: + MOVQ R14, DX WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ R10, $15 - SETGT CX + CMPQ SI, $15 + SETGT R14 CMPQ AX, $16 - JL LBB7_39 - CMPQ R10, $16 - QUAD $0xfffffe343d6ffac5 // vmovdqu $-460(%rip), %xmm7 /* LCPI7_4(%rip) */ - QUAD $0xfffffe3c056f7ac5 // vmovdqu $-452(%rip), %xmm8 /* LCPI7_5(%rip) */ - QUAD $0xfffffe440d6f7ac5 // vmovdqu $-444(%rip), %xmm9 /* LCPI7_6(%rip) */ - QUAD $0xfffffe4c156f7ac5 // vmovdqu $-436(%rip), %xmm10 /* LCPI7_7(%rip) */ - JL LBB7_41 - MOVQ R11, DI - SUBQ R12, DI - QUAD $0xfffffd881d6ffec5 // vmovdqu $-632(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xfffffda0256ffec5 // vmovdqu $-608(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xfffffdb82d6ffec5 // vmovdqu $-584(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xfffffdd0356ffec5 // vmovdqu $-560(%rip), %ymm6 /* LCPI7_3(%rip) */ - -LBB7_43: - LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 + JL LBB7_30 + CMPQ SI, $16 + QUAD $0xfffffe273d6ffac5 // vmovdqu $-473(%rip), %xmm7 /* LCPI7_4(%rip) */ + QUAD $0xfffffe2f056f7ac5 // vmovdqu $-465(%rip), %xmm8 /* LCPI7_5(%rip) */ + QUAD $0xfffffe370d6f7ac5 // vmovdqu $-457(%rip), %xmm9 /* LCPI7_6(%rip) */ + QUAD $0xfffffe3f156f7ac5 // vmovdqu $-449(%rip), %xmm10 /* LCPI7_7(%rip) */ + JL LBB7_35 + QUAD $0xfffffd811d6ffec5 // vmovdqu $-639(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xfffffd99256ffec5 // vmovdqu $-615(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xfffffdb12d6ffec5 // vmovdqu $-591(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xfffffdc9356ffec5 // vmovdqu $-567(%rip), %ymm6 /* LCPI7_3(%rip) */ + +LBB7_16: + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0xcf74f9c5 
// vpcmpeqb %xmm7, %xmm0, %xmm1 LONG $0xd074b9c5 // vpcmpeqb %xmm0, %xmm8, %xmm2 LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd0dbb1c5 // vpand %xmm0, %xmm9, %xmm2 + LONG $0xd0ebb1c5 // vpor %xmm0, %xmm9, %xmm2 LONG $0xd274a9c5 // vpcmpeqb %xmm2, %xmm10, %xmm2 LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 - LONG $0x037ffac5 // vmovdqu %xmm0, (%rbx) + LONG $0x7f7ac1c4; BYTE $0x00 // vmovdqu %xmm0, (%r8) LONG $0xc9d7f9c5 // vpmovmskb %xmm1, %ecx TESTW CX, CX - JNE LBB7_44 - ADDQ $16, R12 - ADDQ $16, BX - LEAQ -16(R10), R14 - LEAQ -16(AX), R13 - CMPQ R10, $31 - SETGT CX - CMPQ R10, $32 - JL LBB7_47 - ADDQ $-16, DI - MOVQ R14, R10 + JNE LBB7_22 + ADDQ $16, R13 + ADDQ $16, R8 + LEAQ -16(AX), R11 + CMPQ SI, $31 + SETGT R14 + CMPQ SI, $32 + LEAQ -16(SI), SI + JL LBB7_31 CMPQ AX, $31 - MOVQ R13, AX - JG LBB7_43 - JMP LBB7_47 - -LBB7_8: - BSFL AX, R12 - ADDQ BX, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 - -LBB7_14: - CMPL R13, $16 - JB LBB7_15 - MOVQ DX, 0(BX) - MOVQ CX, 8(BX) - LEAQ 16(R12), R10 - ADDQ $16, BX - LEAQ -16(R13), AX - CMPQ AX, $8 - JAE LBB7_18 - JMP LBB7_19 - -LBB7_44: - MOVWLZX CX, AX - BSFL AX, R12 - SUBQ DI, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 - -LBB7_27: - MOVQ R12, R10 - MOVQ AX, R14 - CMPQ R14, $8 - JB LBB7_31 - -LBB7_30: - MOVQ 0(R10), CX - MOVQ CX, 0(BX) - ADDQ $8, R10 - ADDQ $8, BX - ADDQ $-8, R14 + MOVQ R11, AX + JG LBB7_16 LBB7_31: - CMPQ R14, $4 - JAE LBB7_32 - CMPQ R14, $2 - JAE LBB7_34 - -LBB7_35: - TESTQ R14, R14 - JE LBB7_37 - -LBB7_36: - MOVB 0(R10), CX - MOVB CX, 0(BX) - -LBB7_37: - NOTQ AX - ADDQ R11, AX - SUBQ R12, AX - MOVQ AX, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 - -LBB7_39: - MOVQ AX, R13 - MOVQ R10, R14 - QUAD $0xfffffc511d6ffec5 // vmovdqu $-943(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xfffffc69256ffec5 // vmovdqu $-919(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xfffffc812d6ffec5 // vmovdqu $-895(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xfffffc99356ffec5 // vmovdqu $-871(%rip), %ymm6 /* LCPI7_3(%rip) */ - QUAD $0xfffffcb13d6ffac5 // vmovdqu $-847(%rip), %xmm7 /* LCPI7_4(%rip) */ - QUAD $0xfffffcb9056f7ac5 // vmovdqu $-839(%rip), %xmm8 /* LCPI7_5(%rip) */ - QUAD $0xfffffcc10d6f7ac5 // vmovdqu $-831(%rip), %xmm9 /* LCPI7_6(%rip) */ - QUAD $0xfffffcc9156f7ac5 // vmovdqu $-823(%rip), %xmm10 /* LCPI7_7(%rip) */ - -LBB7_47: - TESTB CX, CX - JE LBB7_48 + TESTB R14, R14 + JE LBB7_36 -LBB7_58: - LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 +LBB7_32: + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0xcf74f9c5 // vpcmpeqb %xmm7, %xmm0, %xmm1 LONG $0xd074b9c5 // vpcmpeqb %xmm0, %xmm8, %xmm2 LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd0dbb1c5 // vpand %xmm0, %xmm9, %xmm2 + LONG $0xd0ebb1c5 // vpor %xmm0, %xmm9, %xmm2 LONG $0xd274a9c5 // vpcmpeqb %xmm2, %xmm10, %xmm2 LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 LONG $0xc1d7f9c5 // vpmovmskb %xmm1, %eax ORL $65536, AX - BSFL AX, AX - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - CMPQ R13, AX - JGE LBB7_59 - CMPQ R13, $8 - JB LBB7_70 - MOVQ CX, 0(BX) - LEAQ 8(R12), AX - ADDQ $8, BX - LEAQ -8(R13), DI - CMPQ DI, $4 - JAE LBB7_73 - JMP LBB7_74 - -LBB7_41: - MOVQ AX, R13 - MOVQ R10, R14 - QUAD $0xfffffba21d6ffec5 // vmovdqu $-1118(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xfffffbba256ffec5 // vmovdqu $-1094(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xfffffbd22d6ffec5 // vmovdqu $-1070(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xfffffbea356ffec5 // vmovdqu $-1046(%rip), %ymm6 /* LCPI7_3(%rip) */ - TESTB CX, CX - JNE LBB7_58 + BSFL AX, R14 + LONG 
$0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + CMPQ R11, R14 + JGE LBB7_55 + CMPQ R11, $8 + JB LBB7_58 + MOVQ AX, 0(R8) + LEAQ 8(R13), AX + ADDQ $8, R8 + LEAQ -8(R11), SI + MOVQ DX, R14 + CMPQ SI, $4 + JAE LBB7_59 + JMP LBB7_60 -LBB7_48: - TESTQ R13, R13 - JLE LBB7_56 - TESTQ R14, R14 - JLE LBB7_56 - XORL CX, CX - XORL AX, AX +LBB7_19: + SUBQ R12, R13 + BSFL AX, AX + ADDQ R13, AX + TESTQ AX, AX + JNS LBB7_72 + JMP LBB7_92 -LBB7_51: - MOVBLZX 0(R12)(CX*1), DX - CMPQ DX, $62 - JA LBB7_52 - MOVQ $5764607797912141824, DI - BTQ DX, DI - JB LBB7_80 +LBB7_20: + CMPL R11, $16 + JB LBB7_43 + MOVQ DX, 0(R8) + MOVQ CX, 8(R8) + LEAQ 16(R13), AX + ADDQ $16, R8 + LEAQ -16(R11), SI + CMPQ SI, $8 + JAE LBB7_44 + JMP LBB7_45 -LBB7_52: - CMPB DX, $-30 - JE LBB7_80 - LEAQ 0(R14)(AX*1), DI - MOVB DX, 0(BX)(CX*1) - LEAQ -1(AX), DX - CMPQ DI, $2 - JL LBB7_55 - ADDQ R13, AX - ADDQ $1, CX - CMPQ AX, $1 - MOVQ DX, AX - JG LBB7_51 +LBB7_22: + MOVWLZX CX, AX + SUBQ R12, R13 + BSFL AX, AX + ADDQ R13, AX + MOVQ DX, R14 + TESTQ AX, AX + JNS LBB7_72 + JMP LBB7_92 -LBB7_55: - SUBQ DX, R12 - ADDQ DX, R14 +LBB7_23: + MOVQ R13, R11 + MOVQ AX, SI + CMPQ SI, $8 + JB LBB7_25 -LBB7_56: - TESTQ R14, R14 - JE LBB7_57 - NOTQ R12 - ADDQ R11, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 +LBB7_24: + MOVQ 0(R11), CX + MOVQ CX, 0(R8) + ADDQ $8, R11 + ADDQ $8, R8 + ADDQ $-8, SI -LBB7_15: - MOVQ R12, R10 - MOVQ R13, AX - CMPQ AX, $8 - JB LBB7_19 +LBB7_25: + CMPQ SI, $4 + JB LBB7_26 + MOVL 0(R11), CX + MOVL CX, 0(R8) + ADDQ $4, R11 + ADDQ $4, R8 + ADDQ $-4, SI + CMPQ SI, $2 + JAE LBB7_52 -LBB7_18: - MOVQ 0(R10), CX - MOVQ CX, 0(BX) - ADDQ $8, R10 - ADDQ $8, BX - ADDQ $-8, AX +LBB7_27: + TESTQ SI, SI + JE LBB7_29 -LBB7_19: - CMPQ AX, $4 - JAE LBB7_20 - CMPQ AX, $2 - JAE LBB7_22 +LBB7_28: + MOVB 0(R11), CX + MOVB CX, 0(R8) -LBB7_23: +LBB7_29: + SUBQ R12, AX + ADDQ R13, AX + NOTQ AX TESTQ AX, AX - JE LBB7_25 + JNS LBB7_72 + JMP LBB7_92 -LBB7_24: - MOVB 0(R10), AX - MOVB AX, 0(BX) +LBB7_26: + CMPQ SI, $2 + JB LBB7_27 -LBB7_25: - SUBQ R11, R12 - ADDQ R13, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 +LBB7_52: + MOVWLZX 0(R11), CX + MOVW CX, 0(R8) + ADDQ $2, R11 + ADDQ $2, R8 + ADDQ $-2, SI + TESTQ SI, SI + JNE LBB7_28 + JMP LBB7_29 -LBB7_32: - MOVL 0(R10), CX - MOVL CX, 0(BX) - ADDQ $4, R10 - ADDQ $4, BX - ADDQ $-4, R14 - CMPQ R14, $2 - JB LBB7_35 +LBB7_30: + MOVQ AX, R11 + QUAD $0xfffffbbb1d6ffec5 // vmovdqu $-1093(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xfffffbd3256ffec5 // vmovdqu $-1069(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xfffffbeb2d6ffec5 // vmovdqu $-1045(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xfffffc03356ffec5 // vmovdqu $-1021(%rip), %ymm6 /* LCPI7_3(%rip) */ + QUAD $0xfffffc1b3d6ffac5 // vmovdqu $-997(%rip), %xmm7 /* LCPI7_4(%rip) */ + QUAD $0xfffffc23056f7ac5 // vmovdqu $-989(%rip), %xmm8 /* LCPI7_5(%rip) */ + QUAD $0xfffffc2b0d6f7ac5 // vmovdqu $-981(%rip), %xmm9 /* LCPI7_6(%rip) */ + QUAD $0xfffffc33156f7ac5 // vmovdqu $-973(%rip), %xmm10 /* LCPI7_7(%rip) */ + TESTB R14, R14 + JE LBB7_36 + JMP LBB7_32 -LBB7_34: - MOVWLZX 0(R10), CX - MOVW CX, 0(BX) - ADDQ $2, R10 - ADDQ $2, BX - ADDQ $-2, R14 - TESTQ R14, R14 - JNE LBB7_36 - JMP LBB7_37 +LBB7_35: + MOVQ AX, R11 + QUAD $0xfffffb6e1d6ffec5 // vmovdqu $-1170(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xfffffb86256ffec5 // vmovdqu $-1146(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xfffffb9e2d6ffec5 // vmovdqu $-1122(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xfffffbb6356ffec5 // vmovdqu $-1098(%rip), %ymm6 /* LCPI7_3(%rip) */ + TESTB R14, R14 + JNE LBB7_32 -LBB7_20: - 
MOVL 0(R10), CX - MOVL CX, 0(BX) - ADDQ $4, R10 - ADDQ $4, BX - ADDQ $-4, AX - CMPQ AX, $2 - JB LBB7_23 +LBB7_36: + TESTQ R11, R11 + JLE LBB7_64 + TESTQ SI, SI + MOVQ DX, R14 + JLE LBB7_65 -LBB7_22: - MOVWLZX 0(R10), CX - MOVW CX, 0(BX) - ADDQ $2, R10 - ADDQ $2, BX - ADDQ $-2, AX - TESTQ AX, AX - JNE LBB7_24 - JMP LBB7_25 +LBB7_38: + MOVBLZX 0(R13), AX + CMPQ AX, $62 + JA LBB7_40 + MOVQ $5764607797912141824, CX + BTQ AX, CX + JB LBB7_57 -LBB7_59: - CMPL AX, $8 - JB LBB7_60 - MOVQ CX, 0(BX) - LEAQ 8(R12), R10 - ADDQ $8, BX - LEAQ -8(AX), DI - CMPQ DI, $4 - JAE LBB7_63 - JMP LBB7_64 +LBB7_40: + CMPB AX, $-30 + JE LBB7_57 + INCQ R13 + MOVB AX, 0(R8) + CMPQ SI, $2 + LEAQ -1(SI), SI + JL LBB7_65 + INCQ R8 + CMPQ R11, $1 + LEAQ -1(R11), R11 + JG LBB7_38 + JMP LBB7_65 -LBB7_80: - SUBQ R11, R12 - SUBQ AX, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 +LBB7_43: + MOVQ R13, AX + MOVQ R11, SI + CMPQ SI, $8 + JB LBB7_45 -LBB7_70: - MOVQ R12, AX - MOVQ R13, DI - CMPQ DI, $4 - JB LBB7_74 +LBB7_44: + MOVQ 0(AX), CX + MOVQ CX, 0(R8) + ADDQ $8, AX + ADDQ $8, R8 + ADDQ $-8, SI -LBB7_73: +LBB7_45: + CMPQ SI, $4 + JB LBB7_46 MOVL 0(AX), CX - MOVL CX, 0(BX) + MOVL CX, 0(R8) ADDQ $4, AX - ADDQ $4, BX - ADDQ $-4, DI + ADDQ $4, R8 + ADDQ $-4, SI + CMPQ SI, $2 + JAE LBB7_54 -LBB7_74: - CMPQ DI, $2 - JAE LBB7_75 - TESTQ DI, DI - JE LBB7_78 +LBB7_47: + TESTQ SI, SI + JE LBB7_49 -LBB7_77: +LBB7_48: MOVB 0(AX), AX - MOVB AX, 0(BX) + MOVB AX, 0(R8) -LBB7_78: - NOTQ R13 +LBB7_49: + SUBQ R12, R13 ADDQ R11, R13 + MOVQ R13, AX + TESTQ AX, AX + JNS LBB7_72 + JMP LBB7_92 + +LBB7_46: + CMPQ SI, $2 + JB LBB7_47 + +LBB7_54: + MOVWLZX 0(AX), CX + MOVW CX, 0(R8) + ADDQ $2, AX + ADDQ $2, R8 + ADDQ $-2, SI + TESTQ SI, SI + JNE LBB7_48 + JMP LBB7_49 + +LBB7_55: + CMPL R14, $8 + JB LBB7_66 + MOVQ AX, 0(R8) + LEAQ 8(R13), SI + ADDQ $8, R8 + LEAQ -8(R14), AX + CMPQ AX, $4 + JAE LBB7_67 + JMP LBB7_68 + +LBB7_57: SUBQ R12, R13 - MOVQ R13, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 + MOVQ R13, AX + TESTQ AX, AX + JNS LBB7_72 + JMP LBB7_92 + +LBB7_58: + MOVQ R13, AX + MOVQ R11, SI + MOVQ DX, R14 + CMPQ SI, $4 + JB LBB7_60 + +LBB7_59: + MOVL 0(AX), CX + MOVL CX, 0(R8) + ADDQ $4, AX + ADDQ $4, R8 + ADDQ $-4, SI LBB7_60: - MOVQ R12, R10 - MOVQ AX, DI - CMPQ DI, $4 - JB LBB7_64 + CMPQ SI, $2 + JB LBB7_61 + MOVWLZX 0(AX), CX + MOVW CX, 0(R8) + ADDQ $2, AX + ADDQ $2, R8 + ADDQ $-2, SI + TESTQ SI, SI + JNE LBB7_62 + JMP LBB7_63 + +LBB7_61: + TESTQ SI, SI + JE LBB7_63 + +LBB7_62: + MOVB 0(AX), AX + MOVB AX, 0(R8) LBB7_63: - MOVL 0(R10), CX - MOVL CX, 0(BX) - ADDQ $4, R10 - ADDQ $4, BX - ADDQ $-4, DI + SUBQ R12, R11 + ADDQ R13, R11 + NOTQ R11 + MOVQ R11, AX + TESTQ AX, AX + JNS LBB7_72 + JMP LBB7_92 LBB7_64: - CMPQ DI, $2 - JAE LBB7_65 - TESTQ DI, DI - JE LBB7_68 + MOVQ DX, R14 + +LBB7_65: + SUBQ R12, R13 + NEGQ SI + SBBQ AX, AX + XORQ R13, AX + TESTQ AX, AX + JNS LBB7_72 + JMP LBB7_92 + +LBB7_66: + MOVQ R13, SI + MOVQ R14, AX + CMPQ AX, $4 + JB LBB7_68 LBB7_67: - MOVB 0(R10), CX - MOVB CX, 0(BX) + MOVL 0(SI), CX + MOVL CX, 0(R8) + ADDQ $4, SI + ADDQ $4, R8 + ADDQ $-4, AX LBB7_68: - SUBQ R11, R12 - ADDQ AX, R12 - TESTQ R12, R12 - JNS LBB7_84 - JMP LBB7_83 + CMPQ AX, $2 + JB LBB7_69 + MOVWLZX 0(SI), CX + MOVW CX, 0(R8) + ADDQ $2, SI + ADDQ $2, R8 + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB7_70 + JMP LBB7_71 -LBB7_75: - MOVWLZX 0(AX), CX - MOVW CX, 0(BX) - ADDQ $2, AX - ADDQ $2, BX - ADDQ $-2, DI - TESTQ DI, DI - JNE LBB7_77 - JMP LBB7_78 +LBB7_69: + TESTQ AX, AX + JE LBB7_71 -LBB7_65: - MOVWLZX 0(R10), CX - MOVW CX, 0(BX) - ADDQ $2, R10 - ADDQ 
$2, BX - ADDQ $-2, DI - TESTQ DI, DI - JNE LBB7_67 - JMP LBB7_68 +LBB7_70: + MOVB 0(SI), AX + MOVB AX, 0(R8) -LBB7_57: - SUBQ R11, R12 - TESTQ R12, R12 - JS LBB7_83 - -LBB7_84: - ADDQ R12, R11 - ADDQ R12, R8 - SUBQ R12, SI - JLE LBB7_85 - SUBQ R12, R9 - MOVB 0(R11), CX +LBB7_71: + SUBQ R12, R13 + ADDQ R14, R13 + MOVQ R13, AX + MOVQ DX, R14 + TESTQ AX, AX + JS LBB7_92 + +LBB7_72: + ADDQ AX, R12 + ADDQ AX, R15 + SUBQ AX, R10 + JLE LBB7_93 + SUBQ AX, R9 + MOVB 0(R12), CX CMPB CX, $-30 - JE LBB7_88 - MOVQ R11, AX + JE LBB7_86 + MOVQ R12, AX -LBB7_92: - MOVBLZX CX, DX - SHLQ $4, DX - MOVQ 0(DX)(R15*1), DI - MOVLQSX DI, BX - SUBQ BX, R9 - JL LBB7_93 - SHLQ $32, DI - LEAQ 0(DX)(R15*1), R11 - ADDQ $8, R11 - MOVQ $12884901889, CX - CMPQ DI, CX - JL LBB7_97 - MOVL 0(R11), CX - MOVL CX, 0(R8) - LEAQ 0(DX)(R15*1), R11 - ADDQ $12, R11 - LEAQ 4(R8), R10 - LEAQ -4(BX), DX - CMPQ DX, $2 - JGE LBB7_100 - JMP LBB7_101 +LBB7_75: + MOVBLZX CX, CX + SHLQ $4, CX + MOVQ 0(CX)(R14*1), DX + MOVLQSX DX, SI + SUBQ SI, R9 + JL LBB7_95 + SHLQ $32, DX + LEAQ 8(CX)(R14*1), BX + CMPQ DX, DI + JL LBB7_78 + MOVL 0(BX), DX + MOVL DX, 0(R15) + LEAQ 12(CX)(R14*1), BX + LEAQ 4(R15), DX + LEAQ -4(SI), CX + CMPQ CX, $2 + JGE LBB7_79 + JMP LBB7_80 -LBB7_97: - MOVQ R8, R10 - MOVQ BX, DX - CMPQ DX, $2 - JL LBB7_101 - -LBB7_100: - MOVWLZX 0(R11), DI - MOVW DI, 0(R10) - ADDQ $2, R11 - ADDQ $2, R10 - ADDQ $-2, DX +LBB7_78: + MOVQ R15, DX + MOVQ SI, CX + CMPQ CX, $2 + JL LBB7_80 -LBB7_101: - TESTQ DX, DX - JLE LBB7_103 - MOVB 0(R11), CX - MOVB CX, 0(R10) +LBB7_79: + MOVQ DI, R8 + MOVWLZX 0(BX), DI + MOVW DI, 0(DX) + MOVQ R8, DI + ADDQ $2, BX + ADDQ $2, DX + ADDQ $-2, CX -LBB7_103: - ADDQ BX, R8 +LBB7_80: + TESTQ CX, CX + JLE LBB7_82 + MOVB 0(BX), CX + MOVB CX, 0(DX) -LBB7_104: - ADDQ $1, AX - LEAQ -1(SI), CX - MOVQ AX, R11 - CMPQ SI, $1 - MOVQ CX, SI +LBB7_82: + ADDQ SI, R15 + +LBB7_83: + INCQ AX + MOVQ AX, R12 + CMPQ R10, $1 + LEAQ -1(R10), R10 JG LBB7_2 - JMP LBB7_105 - -LBB7_88: - CMPQ SI, $3 - JL LBB7_94 - CMPB 1(R11), $-128 - JNE LBB7_94 - MOVB 2(R11), CX + JMP LBB7_94 + +LBB7_86: + CMPQ R10, $3 + JL LBB7_90 + CMPB 1(R12), $-128 + JNE LBB7_90 + MOVB 2(R12), CX MOVL CX, AX ANDB $-2, AX CMPB AX, $-88 - JNE LBB7_94 - LEAQ 2(R11), AX - ADDQ $-2, SI - JMP LBB7_92 + JNE LBB7_90 + LEAQ 2(R12), AX + ADDQ $-2, R10 + JMP LBB7_75 -LBB7_94: +LBB7_90: TESTQ R9, R9 - JLE LBB7_3 - MOVB $-30, 0(R8) - ADDQ $1, R8 - ADDQ $-1, R9 - MOVQ R11, AX - JMP LBB7_104 + JLE LBB7_96 + MOVB $-30, 0(R15) + INCQ R15 + DECQ R9 + MOVQ R12, AX + JMP LBB7_83 -LBB7_83: - SUBQ -56(BP), R8 - MOVQ R12, AX +LBB7_92: + SUBQ -56(BP), R15 NOTQ AX - ADDQ R8, AX + ADDQ AX, R15 MOVQ -64(BP), CX - MOVQ AX, 0(CX) - MOVQ -48(BP), AX - SUBQ R11, AX - ADDQ R12, AX - JMP LBB7_106 + MOVQ R15, 0(CX) + SUBQ -48(BP), R12 + ADDQ AX, R12 + NOTQ R12 + JMP LBB7_97 -LBB7_85: - MOVQ R11, AX +LBB7_93: + MOVQ R12, AX -LBB7_105: - SUBQ -56(BP), R8 +LBB7_94: + SUBQ -56(BP), R15 MOVQ -64(BP), CX - MOVQ R8, 0(CX) + MOVQ R15, 0(CX) SUBQ -48(BP), AX - JMP LBB7_106 + JMP LBB7_98 -LBB7_93: - SUBQ -56(BP), R8 +LBB7_95: + SUBQ -56(BP), R15 MOVQ -64(BP), AX - MOVQ R8, 0(AX) + MOVQ R15, 0(AX) -LBB7_3: - NOTQ R11 - ADDQ -48(BP), R11 - MOVQ R11, AX +LBB7_96: + NOTQ R12 + ADDQ -48(BP), R12 + +LBB7_97: + MOVQ R12, AX -LBB7_106: +LBB7_98: ADDQ $24, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 @@ -3796,7 +3728,7 @@ LBB8_5: SHLQ CX, DI MOVL AX, CX SHLQ $4, CX - LONG $0x783d8d4c; WORD $0x0036; BYTE $0x00 // leaq $13944(%rip), %r15 /* _POW10_M128_TAB(%rip) */ + LONG $0x513d8d4c; WORD $0x003b; BYTE $0x00 // 
leaq $15185(%rip), %r15 /* _POW10_M128_TAB(%rip) */ MOVQ DI, AX MULQ 8(CX)(R15*1) MOVQ AX, R11 @@ -3822,35 +3754,35 @@ LBB8_5: JE LBB8_1 LBB8_11: - MOVQ R14, AX - SHRQ $63, AX - LEAL 9(AX), CX + MOVQ R14, DI + SHRQ $63, DI + LEAL 9(DI), CX SHRQ CX, R14 ORQ R11, DX JNE LBB8_14 - MOVL R14, CX - ANDL $3, CX - CMPL CX, $1 + MOVL R14, AX + ANDL $3, AX + CMPL AX, $1 JE LBB8_1 LBB8_14: - LONG $0x526ace69; WORD $0x0003 // imull $217706, %esi, %ecx - SARL $16, CX - ADDL $1087, CX - MOVLQSX CX, SI - MOVL R14, DX - ANDL $1, DX - ADDQ R14, DX - MOVQ $126100789566373888, CX - ANDQ DX, CX - NOTQ R10 - ADDQ SI, R10 - ADDQ AX, R10 - CMPQ CX, $1 - SBBQ $-1, R10 - LEAQ -1(R10), AX - CMPQ AX, $2045 - JBE LBB8_16 + LONG $0x526ac669; WORD $0x0003 // imull $217706, %esi, %eax + SARL $16, AX + ADDL $1087, AX + WORD $0x9848 // cltq + SUBQ R10, AX + XORQ $1, DI + SUBQ DI, AX + MOVL R14, DX + ANDL $1, DX + ADDQ R14, DX + MOVQ $126100789566373888, CX + ANDQ DX, CX + CMPQ CX, $1 + SBBQ $-1, AX + LEAQ -1(AX), SI + CMPQ SI, $2045 + JBE LBB8_16 LBB8_1: XORL AX, AX @@ -3867,15 +3799,15 @@ LBB8_16: MOVB $2, CX SBBB $0, CX SHRQ CX, DX - SHLQ $52, R10 - MOVQ $4503599627370495, AX - ANDQ DX, AX - ORQ R10, AX - MOVQ $-9223372036854775808, CX + SHLQ $52, AX + MOVQ $4503599627370495, CX + ANDQ DX, CX ORQ AX, CX + MOVQ $-9223372036854775808, AX + ORQ CX, AX CMPL R9, $-1 - LONG $0xc8450f48 // cmovneq %rax, %rcx - MOVQ CX, 0(R8) + LONG $0xc1450f48 // cmovneq %rcx, %rax + MOVQ AX, 0(R8) MOVB $1, AX JMP LBB8_17 @@ -3903,217 +3835,284 @@ _decimal_to_f64: WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx BYTE $0x50 // pushq %rax - MOVQ SI, BX - MOVQ DI, R15 - MOVQ $4503599627370496, R13 + MOVQ SI, R13 + MOVQ DI, R12 + MOVQ $4503599627370496, BX CMPL 16(DI), $0 JE LBB9_4 MOVQ $9218868437227405312, R14 - MOVL 20(R15), AX - XORL R12, R12 + MOVL 20(R12), AX + XORL R15, R15 CMPL AX, $310 - JG LBB9_69 + JG LBB9_84 CMPL AX, $-330 JGE LBB9_5 XORL R14, R14 - JMP LBB9_69 + JMP LBB9_84 LBB9_4: XORL R14, R14 - XORL R12, R12 - JMP LBB9_69 + XORL R15, R15 + JMP LBB9_84 LBB9_5: TESTL AX, AX - MOVQ BX, -48(BP) - JLE LBB9_12 - XORL R12, R12 - LONG $0x6c358d4c; WORD $0x0060; BYTE $0x00 // leaq $24684(%rip), %r14 /* _POW_TAB(%rip) */ - JMP LBB9_8 - -LBB9_10: - MOVL AX, AX - MOVL 0(R14)(AX*4), BX - CMPL 16(R15), $0 - JE LBB9_7 + MOVQ R13, -48(BP) + JLE LBB9_20 + XORL R15, R15 + LONG $0x402d8d4c; WORD $0x0065; BYTE $0x00 // leaq $25920(%rip), %r13 /* _POW_TAB(%rip) */ + JMP LBB9_9 -LBB9_11: - MOVQ R15, DI +LBB9_7: + NEGL BX + MOVQ R12, DI MOVL BX, SI - LONG $0x003359e8; BYTE $0x00 // callq _right_shift + LONG $0x00385de8; BYTE $0x00 // callq _right_shift -LBB9_7: - ADDL BX, R12 - MOVL 20(R15), AX +LBB9_8: + ADDL R14, R15 + MOVL 20(R12), AX TESTL AX, AX - JLE LBB9_12 + JLE LBB9_20 -LBB9_8: +LBB9_9: + MOVL $27, R14 CMPL AX, $8 - JLE LBB9_10 - MOVL $27, BX - CMPL 16(R15), $0 - JNE LBB9_11 + JG LBB9_11 + MOVL AX, AX + MOVL 0(R13)(AX*4), R14 + +LBB9_11: + TESTL R14, R14 + JE LBB9_8 + CMPL 16(R12), $0 + JE LBB9_8 + MOVL R14, BX + NEGL BX + TESTL R14, R14 + JS LBB9_16 + CMPL R14, $61 + JL LBB9_7 + +LBB9_15: + MOVQ R12, DI + MOVL $60, SI + LONG $0x003815e8; BYTE $0x00 // callq _right_shift + LEAL 60(BX), AX + CMPL BX, $-120 + MOVL AX, BX + JL LBB9_15 JMP LBB9_7 -LBB9_12: - LONG $0x2e358d4c; WORD $0x0060; BYTE $0x00 // leaq $24622(%rip), %r14 /* _POW_TAB(%rip) */ - JMP LBB9_14 +LBB9_16: + CMPL R14, $-61 + JG LBB9_18 + +LBB9_17: + MOVQ R12, DI + MOVL $60, SI + LONG $0x003687e8; BYTE $0x00 // callq _left_shift + LEAL -60(BX), SI + CMPL BX, $120 + MOVL SI, BX + JG 
LBB9_17 + JMP LBB9_19 LBB9_18: - MOVL $27, BX - CMPL 16(R15), $0 - JE LBB9_13 + MOVL BX, SI + +LBB9_19: + MOVQ R12, DI + LONG $0x003671e8; BYTE $0x00 // callq _left_shift + JMP LBB9_8 LBB9_20: - MOVQ R15, DI - MOVL BX, SI - LONG $0x00318ce8; BYTE $0x00 // callq _left_shift - MOVL 20(R15), AX + LONG $0xac358d4c; WORD $0x0064; BYTE $0x00 // leaq $25772(%rip), %r14 /* _POW_TAB(%rip) */ + JMP LBB9_23 + +LBB9_21: + MOVQ R12, DI + LONG $0x00365ee8; BYTE $0x00 // callq _left_shift -LBB9_13: - SUBL BX, R12 +LBB9_22: + SUBL R13, R15 + MOVL 20(R12), AX -LBB9_14: +LBB9_23: TESTL AX, AX - JS LBB9_17 - JNE LBB9_21 - MOVQ 0(R15), CX + JS LBB9_26 + JNE LBB9_37 + MOVQ 0(R12), CX CMPB 0(CX), $53 - JL LBB9_19 - JMP LBB9_21 + JL LBB9_27 + JMP LBB9_37 -LBB9_17: +LBB9_26: + MOVL $27, R13 CMPL AX, $-8 - JL LBB9_18 + JL LBB9_28 -LBB9_19: - MOVL AX, CX - NEGL CX - MOVLQSX CX, CX - MOVL 0(R14)(CX*4), BX - CMPL 16(R15), $0 - JNE LBB9_20 - JMP LBB9_13 +LBB9_27: + NEGL AX + WORD $0x9848 // cltq + MOVL 0(R14)(AX*4), R13 -LBB9_21: - CMPL R12, $-1022 - JG LBB9_27 - CMPL 16(R15), $0 - MOVQ -48(BP), BX - JE LBB9_29 - CMPL R12, $-1082 - JG LBB9_30 - ADDL $961, R12 +LBB9_28: + TESTL R13, R13 + JE LBB9_22 + CMPL 16(R12), $0 + JE LBB9_22 + TESTL R13, R13 + JLE LBB9_34 + MOVL R13, SI + CMPL R13, $61 + JL LBB9_21 + MOVL R13, BX + +LBB9_33: + MOVQ R12, DI + MOVL $60, SI + LONG $0x003603e8; BYTE $0x00 // callq _left_shift + LEAL -60(BX), SI + CMPL BX, $120 + MOVL SI, BX + JG LBB9_33 + JMP LBB9_21 -LBB9_25: - MOVQ R15, DI +LBB9_34: + MOVL R13, BX + CMPL R13, $-61 + JG LBB9_36 + +LBB9_35: + MOVQ R12, DI + MOVL $60, SI + LONG $0x003750e8; BYTE $0x00 // callq _right_shift + LEAL 60(BX), AX + CMPL BX, $-120 + MOVL AX, BX + JL LBB9_35 + +LBB9_36: + NEGL BX + MOVQ R12, DI + MOVL BX, SI + LONG $0x00373ae8; BYTE $0x00 // callq _right_shift + JMP LBB9_22 + +LBB9_37: + CMPL R15, $-1022 + JG LBB9_43 + CMPL 16(R12), $0 + MOVQ -48(BP), R13 + MOVQ $4503599627370496, BX + JE LBB9_45 + CMPL R15, $-1082 + JG LBB9_46 + ADDL $961, R15 + +LBB9_41: + MOVQ R12, DI MOVL $60, SI - LONG $0x0032bbe8; BYTE $0x00 // callq _right_shift - ADDL $60, R12 - CMPL R12, $-120 - JL LBB9_25 - ADDL $60, R12 - JMP LBB9_31 + LONG $0x0036f9e8; BYTE $0x00 // callq _right_shift + ADDL $60, R15 + CMPL R15, $-120 + JL LBB9_41 + ADDL $60, R15 + JMP LBB9_47 -LBB9_27: - CMPL R12, $1024 - MOVQ -48(BP), BX - JG LBB9_66 - ADDL $-1, R12 - MOVL R12, R14 - JMP LBB9_32 +LBB9_43: + CMPL R15, $1024 + MOVQ -48(BP), R13 + MOVQ $4503599627370496, BX + JG LBB9_81 + DECL R15 + MOVL R15, R14 + JMP LBB9_48 -LBB9_29: +LBB9_45: MOVL $-1022, R14 - JMP LBB9_34 + JMP LBB9_50 -LBB9_30: - ADDL $1021, R12 +LBB9_46: + ADDL $1021, R15 -LBB9_31: - NEGL R12 - MOVQ R15, DI - MOVL R12, SI - LONG $0x003274e8; BYTE $0x00 // callq _right_shift +LBB9_47: + NEGL R15 + MOVQ R12, DI + MOVL R15, SI + LONG $0x0036a9e8; BYTE $0x00 // callq _right_shift MOVL $-1022, R14 -LBB9_32: - CMPL 16(R15), $0 - JE LBB9_34 - MOVQ R15, DI +LBB9_48: + CMPL 16(R12), $0 + JE LBB9_50 + MOVQ R12, DI MOVL $53, SI - LONG $0x0030cae8; BYTE $0x00 // callq _left_shift + LONG $0x00351fe8; BYTE $0x00 // callq _left_shift -LBB9_34: - MOVL 20(R15), AX - MOVQ $-1, R12 - CMPL AX, $20 - JG LBB9_68 - TESTL AX, AX - JLE LBB9_40 - MOVL 16(R15), DX - XORL SI, SI - TESTL DX, DX - WORD $0x480f; BYTE $0xd6 // cmovsl %esi, %edx - LEAQ -1(AX), R9 - CMPQ R9, DX - LONG $0xca430f44 // cmovael %edx, %r9d - LEAL 1(R9), R8 - XORL R12, R12 +LBB9_50: + MOVLQSX 20(R12), R10 + MOVQ $-1, R15 + CMPQ R10, $20 + JG LBB9_83 + MOVL R10, CX + TESTL R10, R10 + JLE LBB9_55 + 
MOVLQSX 16(R12), SI + XORL DX, DX + XORL R15, R15 -LBB9_37: +LBB9_53: CMPQ DX, SI - JE LBB9_41 - LEAQ 0(R12)(R12*4), DI - MOVQ 0(R15), CX - MOVBQSX 0(CX)(SI*1), CX - LEAQ 0(CX)(DI*2), R12 - ADDQ $-48, R12 - ADDQ $1, SI - CMPQ AX, SI - JNE LBB9_37 - MOVL R8, R9 - JMP LBB9_41 - -LBB9_40: - XORL R9, R9 - XORL R12, R12 + JGE LBB9_56 + LEAQ 0(R15)(R15*4), AX + MOVQ 0(R12), DI + MOVBQSX 0(DI)(DX*1), DI + LEAQ -48(DI)(AX*2), R15 + INCQ DX + CMPQ CX, DX + JNE LBB9_53 + JMP LBB9_56 + +LBB9_55: + XORL DX, DX + XORL R15, R15 -LBB9_41: - CMPL AX, R9 - JLE LBB9_54 - MOVL AX, DX - SUBL R9, DX - CMPL DX, $16 - JB LBB9_52 - MOVL DX, R8 - QUAD $0xfffffdc0056ffac5 // vmovdqu $-576(%rip), %xmm0 /* LCPI9_0(%rip) */ - LONG $0x22f9c3c4; WORD $0x00c4 // vpinsrq $0, %r12, %xmm0, %xmm0 +LBB9_56: + CMPL CX, DX + JLE LBB9_69 + MOVL R10, R9 + SUBL DX, R9 + CMPL R9, $16 + JB LBB9_67 + MOVL R9, R8 + QUAD $0xfffffd09056ffac5 // vmovdqu $-759(%rip), %xmm0 /* LCPI9_0(%rip) */ + LONG $0x22f9c3c4; WORD $0x00c7 // vpinsrq $0, %r15, %xmm0, %xmm0 ANDL $-16, R8 - QUAD $0xfffdac05027de3c4; WORD $0xf0ff // vpblendd $240, $-596(%rip), %ymm0, %ymm0 /* LCPI9_0(%rip) */ - LEAL -16(R8), CX - MOVL CX, DI - SHRL $4, DI - ADDL $1, DI - MOVL DI, SI + QUAD $0xfffcf505027de3c4; WORD $0xf0ff // vpblendd $240, $-779(%rip), %ymm0, %ymm0 /* LCPI9_0(%rip) */ + LEAL -16(R8), DI + MOVL DI, AX + SHRL $4, AX + INCL AX + MOVL AX, SI ANDL $3, SI - CMPL CX, $48 - JAE LBB9_45 - QUAD $0xfffdad15597de2c4; BYTE $0xff // vpbroadcastq $-595(%rip), %ymm2 /* LCPI9_1(%rip) */ + CMPL DI, $48 + JAE LBB9_60 + QUAD $0xfffcf715597de2c4; BYTE $0xff // vpbroadcastq $-777(%rip), %ymm2 /* LCPI9_1(%rip) */ LONG $0xda6ffdc5 // vmovdqa %ymm2, %ymm3 LONG $0xca6ffdc5 // vmovdqa %ymm2, %ymm1 - JMP LBB9_47 + JMP LBB9_62 -LBB9_45: - ANDL $-4, DI - NEGL DI - QUAD $0xfffd9515597de2c4; BYTE $0xff // vpbroadcastq $-619(%rip), %ymm2 /* LCPI9_1(%rip) */ - QUAD $0xfffd9425597de2c4; BYTE $0xff // vpbroadcastq $-620(%rip), %ymm4 /* LCPI9_2(%rip) */ +LBB9_60: + MOVL SI, DI + SUBL AX, DI + QUAD $0xfffce015597de2c4; BYTE $0xff // vpbroadcastq $-800(%rip), %ymm2 /* LCPI9_1(%rip) */ + QUAD $0xfffcdf25597de2c4; BYTE $0xff // vpbroadcastq $-801(%rip), %ymm4 /* LCPI9_2(%rip) */ LONG $0xda6ffdc5 // vmovdqa %ymm2, %ymm3 LONG $0xca6ffdc5 // vmovdqa %ymm2, %ymm1 -LBB9_46: +LBB9_61: LONG $0xecf4fdc5 // vpmuludq %ymm4, %ymm0, %ymm5 LONG $0xd073fdc5; BYTE $0x20 // vpsrlq $32, %ymm0, %ymm0 LONG $0xc4f4fdc5 // vpmuludq %ymm4, %ymm0, %ymm0 @@ -4135,15 +4134,15 @@ LBB9_46: LONG $0xf173f5c5; BYTE $0x20 // vpsllq $32, %ymm1, %ymm1 LONG $0xc9d4d5c5 // vpaddq %ymm1, %ymm5, %ymm1 ADDL $4, DI - JNE LBB9_46 + JNE LBB9_61 -LBB9_47: +LBB9_62: TESTL SI, SI - JE LBB9_50 + JE LBB9_65 NEGL SI - QUAD $0xfffd2825597de2c4; BYTE $0xff // vpbroadcastq $-728(%rip), %ymm4 /* LCPI9_3(%rip) */ + QUAD $0xfffc7325597de2c4; BYTE $0xff // vpbroadcastq $-909(%rip), %ymm4 /* LCPI9_3(%rip) */ -LBB9_49: +LBB9_64: LONG $0xecf4fdc5 // vpmuludq %ymm4, %ymm0, %ymm5 LONG $0xd073fdc5; BYTE $0x20 // vpsrlq $32, %ymm0, %ymm0 LONG $0xc4f4fdc5 // vpmuludq %ymm4, %ymm0, %ymm0 @@ -4165,9 +4164,9 @@ LBB9_49: LONG $0xf173f5c5; BYTE $0x20 // vpsllq $32, %ymm1, %ymm1 LONG $0xc9d4d5c5 // vpaddq %ymm1, %ymm5, %ymm1 INCL SI - JNE LBB9_49 + JNE LBB9_64 -LBB9_50: +LBB9_65: LONG $0xd273ddc5; BYTE $0x20 // vpsrlq $32, %ymm2, %ymm4 LONG $0xe0f4ddc5 // vpmuludq %ymm0, %ymm4, %ymm4 LONG $0xd073d5c5; BYTE $0x20 // vpsrlq $32, %ymm0, %ymm5 @@ -4201,92 +4200,91 @@ LBB9_50: LONG $0xf273e9c5; BYTE $0x20 // vpsllq $32, %xmm2, %xmm2 LONG $0xc1f4f9c5 // vpmuludq 
%xmm1, %xmm0, %xmm0 LONG $0xc2d4f9c5 // vpaddq %xmm2, %xmm0, %xmm0 - LONG $0xc870f9c5; BYTE $0xee // vpshufd $238, %xmm0, %xmm1 + LONG $0xc870f9c5; BYTE $0x4e // vpshufd $78, %xmm0, %xmm1 LONG $0xd073e9c5; BYTE $0x20 // vpsrlq $32, %xmm0, %xmm2 LONG $0xd1f4e9c5 // vpmuludq %xmm1, %xmm2, %xmm2 - LONG $0xd870f9c5; BYTE $0xff // vpshufd $255, %xmm0, %xmm3 + LONG $0xd873e1c5; BYTE $0x0c // vpsrldq $12, %xmm0, %xmm3 LONG $0xdbf4f9c5 // vpmuludq %xmm3, %xmm0, %xmm3 LONG $0xd2d4e1c5 // vpaddq %xmm2, %xmm3, %xmm2 LONG $0xf273e9c5; BYTE $0x20 // vpsllq $32, %xmm2, %xmm2 LONG $0xc1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm0 LONG $0xc2d4f9c5 // vpaddq %xmm2, %xmm0, %xmm0 - LONG $0x7ef9c1c4; BYTE $0xc4 // vmovq %xmm0, %r12 - CMPL DX, R8 - JE LBB9_54 - ADDL R8, R9 + LONG $0x7ef9c1c4; BYTE $0xc7 // vmovq %xmm0, %r15 + CMPL R9, R8 + JE LBB9_69 + ADDL R8, DX -LBB9_52: - MOVL AX, DX - SUBL R9, DX +LBB9_67: + MOVL CX, SI + SUBL DX, SI -LBB9_53: - ADDQ R12, R12 - LEAQ 0(R12)(R12*4), R12 - ADDL $-1, DX - JNE LBB9_53 +LBB9_68: + ADDQ R15, R15 + LEAQ 0(R15)(R15*4), R15 + DECL SI + JNE LBB9_68 -LBB9_54: - TESTL AX, AX - JS LBB9_62 - MOVL 16(R15), SI - CMPL SI, AX - JLE LBB9_62 - MOVQ 0(R15), DX - MOVB 0(DX)(AX*1), CX - LEAL 1(AX), DI - CMPL DI, SI - JNE LBB9_63 - CMPB CX, $53 - JNE LBB9_63 - CMPL 28(R15), $0 - SETNE CX - JNE LBB9_64 - TESTL AX, AX - JLE LBB9_64 - ADDL $-1, AX - MOVB 0(DX)(AX*1), CX - ANDB $1, CX - JMP LBB9_64 +LBB9_69: + TESTL CX, CX + JS LBB9_77 + MOVL 16(R12), AX + CMPL AX, CX + JLE LBB9_77 + MOVQ 0(R12), SI + MOVB 0(SI)(CX*1), DX + LEAL 1(R10), DI + CMPL DI, AX + JNE LBB9_78 + CMPB DX, $53 + JNE LBB9_78 + CMPL 28(R12), $0 + SETNE DX + JNE LBB9_79 + TESTL CX, CX + JLE LBB9_79 + MOVB -1(R10)(SI*1), DX + ANDB $1, DX + JMP LBB9_79 -LBB9_62: - XORL CX, CX +LBB9_77: + XORL DX, DX -LBB9_64: - MOVBLZX CX, AX - ADDQ AX, R12 +LBB9_79: + MOVBLZX DX, AX + ADDQ AX, R15 MOVQ $9007199254740992, AX - CMPQ R12, AX - JNE LBB9_68 + CMPQ R15, AX + JNE LBB9_83 CMPL R14, $1022 - JLE LBB9_67 + JLE LBB9_82 -LBB9_66: - XORL R12, R12 +LBB9_81: + XORL R15, R15 MOVQ $9218868437227405312, R14 - JMP LBB9_69 + JMP LBB9_84 -LBB9_67: - ADDL $1, R14 - MOVQ R13, R12 +LBB9_82: + INCL R14 + MOVQ BX, R15 -LBB9_68: - MOVQ R12, AX - ANDQ R13, AX +LBB9_83: + MOVQ R15, AX + ANDQ BX, AX ADDL $1023, R14 ANDL $2047, R14 SHLQ $52, R14 TESTQ AX, AX LONG $0xf0440f4c // cmoveq %rax, %r14 -LBB9_69: - ADDQ $-1, R13 - ANDQ R12, R13 - ORQ R14, R13 +LBB9_84: + DECQ BX + ANDQ R15, BX + ORQ R14, BX MOVQ $-9223372036854775808, AX - ORQ R13, AX - CMPL 24(R15), $0 - LONG $0xc5440f49 // cmoveq %r13, %rax - MOVQ AX, 0(BX) + ORQ BX, AX + CMPL 24(R12), $0 + LONG $0xc3440f48 // cmoveq %rbx, %rax + MOVQ AX, 0(R13) XORL AX, AX ADDQ $8, SP BYTE $0x5b // popq %rbx @@ -4298,10 +4296,10 @@ LBB9_69: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB9_63: - CMPB CX, $52 - SETGT CX - JMP LBB9_64 +LBB9_78: + CMPB DX, $52 + SETGT DX + JMP LBB9_79 _atof_native: BYTE $0x55 // pushq %rbp @@ -4323,7 +4321,7 @@ _atof_native: LBB10_4: MOVQ -32(BP), CX MOVB $0, 0(CX)(AX*1) - ADDQ $1, AX + INCQ AX CMPQ -24(BP), AX JA LBB10_4 @@ -4331,729 +4329,308 @@ LBB10_5: LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4511f8c5; BYTE $0xf0 // vmovups %xmm0, $-16(%rbp) CMPB 0(DI), $45 - JNE LBB10_8 + JNE LBB10_6 MOVL $1, -8(BP) MOVL $1, AX CMPQ AX, SI JL LBB10_9 -LBB10_7: +LBB10_41: MOVL $0, -12(BP) JMP LBB10_40 -LBB10_8: +LBB10_6: XORL AX, AX CMPQ AX, SI - JGE LBB10_7 + JGE LBB10_41 LBB10_9: MOVB $1, R11 XORL R9, R9 XORL R10, R10 XORL R8, R8 - JMP LBB10_13 - -LBB10_21: - MOVL $1, -4(BP) 
- -LBB10_12: - ADDQ $1, AX - CMPQ AX, SI - SETLT R11 - CMPQ SI, AX - JE LBB10_23 + JMP LBB10_10 LBB10_13: - MOVBLZX 0(DI)(AX*1), CX - LEAL -48(CX), DX - CMPB DX, $9 - JA LBB10_18 - CMPB CX, $48 - JNE LBB10_20 - TESTL R10, R10 - JE LBB10_22 - MOVLQSX R9, R11 - CMPQ -24(BP), R11 - JA LBB10_10 - JMP LBB10_11 - -LBB10_18: - CMPB CX, $46 - JNE LBB10_30 - MOVL R10, -12(BP) - MOVL $1, R8 - JMP LBB10_12 - -LBB10_20: - MOVLQSX R10, R11 - CMPQ -24(BP), R11 - JBE LBB10_21 - -LBB10_10: - MOVQ -32(BP), DX - MOVB CX, 0(DX)(R11*1) - MOVL -16(BP), R9 - ADDL $1, R9 - MOVL R9, -16(BP) - -LBB10_11: - MOVL R9, R10 - JMP LBB10_12 - -LBB10_22: - ADDL $-1, -12(BP) + DECL -12(BP) XORL R10, R10 - JMP LBB10_12 - -LBB10_23: - MOVL SI, CX - MOVQ SI, AX - TESTL R8, R8 - JNE LBB10_25 - -LBB10_24: - MOVL R9, -12(BP) - -LBB10_25: - TESTB $1, R11 - JE LBB10_40 - MOVB 0(DI)(CX*1), CX - ORB $32, CX - CMPB CX, $101 - JNE LBB10_40 - MOVL AX, DX - MOVB 1(DI)(DX*1), CX - CMPB CX, $45 - JE LBB10_31 - MOVL $1, R8 - CMPB CX, $43 - JNE LBB10_33 - ADDL $2, AX - JMP LBB10_32 - -LBB10_30: - MOVQ AX, CX - TESTL R8, R8 - JNE LBB10_25 - JMP LBB10_24 - -LBB10_31: - ADDL $2, AX - MOVL $-1, R8 - -LBB10_32: - MOVL AX, DX - MOVLQSX DX, DX - XORL AX, AX - CMPQ DX, SI - JL LBB10_34 - JMP LBB10_39 - -LBB10_33: - ADDQ $1, DX - MOVLQSX DX, DX - XORL AX, AX - CMPQ DX, SI - JGE LBB10_39 - -LBB10_34: - XORL AX, AX - -LBB10_35: - MOVBLSX 0(DI)(DX*1), CX - CMPL CX, $48 - JL LBB10_39 - CMPL AX, $9999 - JG LBB10_39 - CMPB CX, $57 - JG LBB10_39 - LEAL 0(AX)(AX*4), AX - LEAL 0(CX)(AX*2), AX - ADDL $-48, AX - ADDQ $1, DX - CMPQ SI, DX - JNE LBB10_35 - -LBB10_39: - IMULL R8, AX - ADDL AX, -12(BP) - -LBB10_40: - LEAQ -32(BP), DI - LEAQ -40(BP), SI - LONG $0xfff980e8; BYTE $0xff // callq _decimal_to_f64 - LONG $0x4510fbc5; BYTE $0xd8 // vmovsd $-40(%rbp), %xmm0 - ADDQ $48, SP - BYTE $0x5d // popq %rbp - RET - -_value: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - WORD $0x5741 // pushq %r15 - WORD $0x5641 // pushq %r14 - WORD $0x5541 // pushq %r13 - WORD $0x5441 // pushq %r12 - BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVQ R8, R12 - MOVQ CX, R14 - MOVQ SI, BX - MOVQ DI, R15 - MOVQ DX, -48(BP) - MOVQ DI, -80(BP) - MOVQ SI, -72(BP) - LEAQ -80(BP), DI - LEAQ -48(BP), SI - LONG $0x0005e4e8; BYTE $0x00 // callq _advance_ns - MOVBLSX AX, AX - CMPL AX, $125 - JA LBB11_11 - LONG $0x190d8d48; WORD $0x0003; BYTE $0x00 // leaq $793(%rip), %rcx /* LJTI11_0(%rip) */ - MOVLQSX 0(CX)(AX*4), AX - ADDQ CX, AX - JMP AX - -LBB11_2: - MOVQ R14, -56(BP) - MOVQ -48(BP), R14 - LEAQ -1(R14), R13 - MOVQ R13, -48(BP) - TESTB $2, R12 - JNE LBB11_4 - LEAQ -80(BP), DI - LEAQ -48(BP), SI - MOVQ -56(BP), DX - LONG $0x000b9be8; BYTE $0x00 // callq _vnumber - MOVQ -48(BP), BX - JMP LBB11_50 - -LBB11_4: - XORL AX, AX - CMPB 0(R15)(R13*1), $45 - LEAQ 0(R15)(R13*1), R12 - SETEQ AX - ADDQ AX, R12 - SUBQ AX, BX - JE LBB11_45 - CMPQ R13, BX - JAE LBB11_7 - MOVB 0(R12), AX - ADDB $-48, AX - CMPB AX, $10 - JAE LBB11_47 - -LBB11_7: - MOVQ R12, DI - MOVQ BX, SI - LONG $0x0022a9e8; BYTE $0x00 // callq _do_skip_number - TESTQ AX, AX - JS LBB11_46 - ADDQ AX, R12 - MOVQ R12, BX - SUBQ R15, BX - MOVQ BX, -48(BP) - TESTQ R14, R14 - JLE LBB11_49 - MOVQ -56(BP), AX - MOVQ $8, 0(AX) - MOVQ R13, 24(AX) - JMP LBB11_50 - -LBB11_10: - MOVQ $1, 0(R14) - MOVQ -48(BP), BX - JMP LBB11_50 - -LBB11_11: - MOVQ $-2, 0(R14) - MOVQ -48(BP), BX - ADDQ $-1, BX - JMP LBB11_50 - -LBB11_12: - MOVQ $-1, -64(BP) - MOVQ -48(BP), R15 - LEAQ -80(BP), DI - LEAQ -64(BP), DX - MOVQ R15, SI - LONG $0x00080be8; 
BYTE $0x00 // callq _advance_string - TESTQ AX, AX - JS LBB11_31 - MOVQ AX, -48(BP) - MOVQ R15, 16(R14) - MOVQ -64(BP), CX - CMPQ CX, AX - MOVQ $-1, DX - LONG $0xd14c0f48 // cmovlq %rcx, %rdx - MOVQ DX, 24(R14) - MOVL $7, CX - MOVQ CX, 0(R14) - MOVQ AX, BX - JMP LBB11_50 - -LBB11_14: - TESTL R12, R12 - MOVQ $-2, AX - MOVL $11, CX - JMP LBB11_28 - -LBB11_15: - TESTL R12, R12 - MOVQ $-2, AX - MOVL $10, CX - JMP LBB11_28 - -LBB11_16: - MOVQ $5, 0(R14) - MOVQ -48(BP), BX - JMP LBB11_50 - -LBB11_17: - TESTL R12, R12 - MOVQ $-2, AX - MOVL $12, CX - JMP LBB11_28 - -LBB11_18: - MOVQ -48(BP), AX - LEAQ -4(BX), CX - CMPQ AX, CX - JA LBB11_22 - MOVL 0(R15)(AX*1), DX - CMPL DX, $1702063201 - JNE LBB11_32 - ADDQ $4, AX - MOVQ AX, -48(BP) - MOVL $4, CX - JMP LBB11_40 - -LBB11_21: - MOVQ -48(BP), AX - LEAQ -3(BX), CX - CMPQ AX, CX - JA LBB11_22 - MOVL -1(R15)(AX*1), DX - CMPL DX, $1819047278 - JNE LBB11_36 - ADDQ $3, AX - MOVQ AX, -48(BP) - MOVL $2, CX - JMP LBB11_40 - -LBB11_23: - MOVQ -48(BP), AX - LEAQ -3(BX), CX - CMPQ AX, CX - JBE LBB11_24 - -LBB11_22: - MOVQ BX, -48(BP) - MOVQ $-1, CX - MOVQ CX, 0(R14) - JMP LBB11_50 - -LBB11_26: - MOVQ $6, 0(R14) - MOVQ -48(BP), BX - JMP LBB11_50 - -LBB11_27: - TESTL R12, R12 - MOVQ $-2, AX - MOVL $13, CX - -LBB11_28: - LONG $0xc8490f48 // cmovnsq %rax, %rcx - MOVQ CX, 0(R14) - SARL $31, R12 - NOTL R12 - MOVLQSX R12, BX - ADDQ -48(BP), BX - JMP LBB11_50 - -LBB11_24: - MOVL -1(R15)(AX*1), DX - CMPL DX, $1702195828 - JNE LBB11_41 - ADDQ $3, AX - MOVQ AX, -48(BP) - MOVL $3, CX - JMP LBB11_40 -LBB11_31: - MOVQ BX, -48(BP) - MOVQ AX, 0(R14) - JMP LBB11_50 +LBB10_22: + INCQ AX + CMPQ AX, SI + SETLT R11 + CMPQ SI, AX + JE LBB10_23 -LBB11_32: - MOVQ $-2, CX - CMPB DX, $97 - JNE LBB11_40 - MOVL $1702063201, DX +LBB10_10: + MOVBLZX 0(DI)(AX*1), CX + LEAL -48(CX), DX + CMPB DX, $9 + JA LBB10_19 + TESTL R10, R10 + JNE LBB10_14 + CMPB CX, $48 + JE LBB10_13 + +LBB10_14: + MOVLQSX R9, R10 + CMPQ -24(BP), R10 + JBE LBB10_16 + MOVQ -32(BP), DX + MOVB CX, 0(DX)(R10*1) + MOVL -16(BP), R9 + INCL R9 + MOVL R9, -16(BP) + MOVL R9, R10 + JMP LBB10_22 + +LBB10_19: + CMPB CX, $46 + JNE LBB10_20 + MOVL R10, -12(BP) + MOVL $1, R8 + JMP LBB10_22 -LBB11_34: - SHRL $8, DX - MOVBLSX 1(R15)(AX*1), SI - ADDQ $1, AX - MOVBLZX DX, DI - CMPL DI, SI - JE LBB11_34 - JMP LBB11_39 +LBB10_16: + CMPB CX, $48 + JNE LBB10_18 + MOVL R9, R10 + JMP LBB10_22 -LBB11_36: - ADDQ $-1, AX - MOVQ AX, -48(BP) - MOVQ $-2, CX - CMPB DX, $110 - JNE LBB11_40 - MOVL $1819047278, DX +LBB10_18: + MOVL $1, -4(BP) + MOVL R9, R10 + JMP LBB10_22 -LBB11_38: - SHRL $8, DX - MOVBLSX 1(R15)(AX*1), SI - ADDQ $1, AX - MOVBLZX DX, DI - CMPL DI, SI - JE LBB11_38 - JMP LBB11_39 - -LBB11_41: - ADDQ $-1, AX - MOVQ AX, -48(BP) - MOVQ $-2, CX - CMPB DX, $116 - JNE LBB11_40 - MOVL $1702195828, DX +LBB10_23: + MOVL SI, CX + MOVQ SI, AX + TESTL R8, R8 + JNE LBB10_26 -LBB11_43: - SHRL $8, DX - MOVBLSX 1(R15)(AX*1), SI - ADDQ $1, AX - MOVBLZX DX, DI - CMPL DI, SI - JE LBB11_43 +LBB10_25: + MOVL R9, -12(BP) -LBB11_39: - MOVQ AX, -48(BP) +LBB10_26: + TESTB $1, R11 + JE LBB10_40 + MOVB 0(DI)(CX*1), CX + ORB $32, CX + CMPB CX, $101 + JNE LBB10_40 + MOVL AX, DX + MOVB 1(DI)(DX*1), CX + CMPB CX, $45 + JE LBB10_32 + MOVL $1, R8 + CMPB CX, $43 + JNE LBB10_30 + ADDL $2, AX + JMP LBB10_33 -LBB11_40: - MOVQ AX, BX - MOVQ CX, 0(R14) - JMP LBB11_50 +LBB10_20: + MOVQ AX, CX + TESTL R8, R8 + JNE LBB10_26 + JMP LBB10_25 -LBB11_45: - SUBQ R15, R12 - MOVQ R12, -48(BP) - MOVQ $-1, R13 - JMP LBB11_48 +LBB10_32: + ADDL $2, AX + MOVL $-1, R8 -LBB11_46: - NOTQ AX - ADDQ 
AX, R12 +LBB10_33: + MOVL AX, DX + MOVLQSX DX, DX + XORL R9, R9 + CMPQ DX, SI + JL LBB10_35 + JMP LBB10_39 -LBB11_47: - SUBQ R15, R12 - MOVQ R12, -48(BP) - MOVQ $-2, R13 +LBB10_30: + INCQ DX + MOVLQSX DX, DX + XORL R9, R9 + CMPQ DX, SI + JGE LBB10_39 -LBB11_48: - MOVQ R12, BX +LBB10_35: + XORL R9, R9 -LBB11_49: - MOVQ -56(BP), AX - MOVQ R13, 0(AX) +LBB10_36: + CMPL R9, $9999 + JG LBB10_39 + MOVBLZX 0(DI)(DX*1), CX + LEAL -48(CX), AX + CMPB AX, $9 + JA LBB10_39 + LEAL 0(R9)(R9*4), AX + LEAL -48(CX)(AX*2), R9 + INCQ DX + CMPQ SI, DX + JNE LBB10_36 -LBB11_50: - MOVQ BX, AX - ADDQ $40, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp +LBB10_39: + IMULL R8, R9 + ADDL R9, -12(BP) + +LBB10_40: + LEAQ -32(BP), DI + LEAQ -40(BP), SI + LONG $0xfff8cbe8; BYTE $0xff // callq _decimal_to_f64 + LONG $0x4510fbc5; BYTE $0xd8 // vmovsd $-40(%rbp), %xmm0 + ADDQ $48, SP + BYTE $0x5d // popq %rbp RET -// .set L11_0_set_10, LBB11_10-LJTI11_0 -// .set L11_0_set_11, LBB11_11-LJTI11_0 -// .set L11_0_set_12, LBB11_12-LJTI11_0 -// .set L11_0_set_14, LBB11_14-LJTI11_0 -// .set L11_0_set_2, LBB11_2-LJTI11_0 -// .set L11_0_set_15, LBB11_15-LJTI11_0 -// .set L11_0_set_16, LBB11_16-LJTI11_0 -// .set L11_0_set_17, LBB11_17-LJTI11_0 -// .set L11_0_set_18, LBB11_18-LJTI11_0 -// .set L11_0_set_21, LBB11_21-LJTI11_0 -// .set L11_0_set_23, LBB11_23-LJTI11_0 -// .set L11_0_set_26, LBB11_26-LJTI11_0 -// .set L11_0_set_27, LBB11_27-LJTI11_0 -LJTI11_0: - LONG $0xfffffd8b // .long L11_0_set_10 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffdaf // .long L11_0_set_12 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe02 // .long L11_0_set_14 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffcf0 // .long 
L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffcf0 // .long L11_0_set_2 - LONG $0xfffffe16 // .long L11_0_set_15 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe2a // .long L11_0_set_16 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe3a // .long L11_0_set_17 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe4e // .long L11_0_set_18 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffe7d // .long L11_0_set_21 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffead // .long L11_0_set_23 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffecd // .long L11_0_set_26 - LONG $0xfffffd9b // .long L11_0_set_11 - LONG $0xfffffedd // .long L11_0_set_27 - -LCPI12_0: +LCPI11_0: QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' -LCPI12_1: +LCPI11_1: QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' -LCPI12_2: 
+LCPI11_2: QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' -LCPI12_3: +LCPI11_3: QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' -LCPI12_4: +LCPI11_4: QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' -LCPI12_5: +LCPI11_5: QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' -LCPI12_6: +LCPI11_6: QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' -LCPI12_7: +LCPI11_7: QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' -_advance_ns: +_value: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5741 // pushq %r15 + WORD $0x5641 // pushq %r14 + WORD $0x5541 // pushq %r13 + WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - MOVQ 0(SI), R11 - MOVQ 0(DI), R9 - MOVQ 8(DI), R8 - CMPQ R11, R8 - JAE LBB12_4 - MOVB 0(R9)(R11*1), AX + SUBQ $40, SP + MOVQ CX, R10 + MOVQ DX, R12 + MOVQ SI, R13 + MOVQ DI, R15 + MOVQ DX, -48(BP) + MOVQ DI, -80(BP) + MOVQ SI, -72(BP) + CMPQ DX, SI + JAE LBB11_5 + MOVB 0(R15)(R12*1), AX CMPB AX, $13 - JE LBB12_4 + JE LBB11_5 CMPB AX, $32 - JE LBB12_4 - ADDB $-9, AX - CMPB AX, $1 - JA LBB12_48 - -LBB12_4: - LEAQ 1(R11), AX - CMPQ AX, R8 - JAE LBB12_9 - MOVB 0(R9)(AX*1), CX - CMPB CX, $13 - JE LBB12_9 - CMPB CX, $32 - JE LBB12_9 - ADDB $-9, CX + JE LBB11_5 + LEAL -9(AX), CX CMPB CX, $1 - JA LBB12_8 + JBE LBB11_5 + MOVQ R12, BX + JMP LBB11_48 -LBB12_9: - LEAQ 2(R11), AX - CMPQ AX, R8 - JAE LBB12_14 - MOVB 0(R9)(AX*1), CX - CMPB CX, $13 - JE LBB12_14 - CMPB CX, $32 - JE LBB12_14 - ADDB $-9, CX +LBB11_5: + LEAQ 1(R12), BX + CMPQ BX, R13 + JAE LBB11_9 + MOVB 0(R15)(BX*1), AX + CMPB AX, $13 + JE LBB11_9 + CMPB AX, $32 + JE LBB11_9 + LEAL -9(AX), CX CMPB CX, $1 - JA LBB12_8 + JA LBB11_48 -LBB12_14: - LEAQ 3(R11), AX - CMPQ AX, R8 - JAE LBB12_19 - MOVB 0(R9)(AX*1), CX - CMPB CX, $13 - JE LBB12_19 - CMPB CX, $32 - JE LBB12_19 - ADDB $-9, CX +LBB11_9: + LEAQ 2(R12), BX + CMPQ BX, R13 + JAE LBB11_13 + MOVB 0(R15)(BX*1), AX + CMPB AX, $13 + JE LBB11_13 + CMPB AX, $32 + JE LBB11_13 + LEAL -9(AX), CX CMPB CX, $1 - JBE LBB12_19 + JA LBB11_48 -LBB12_8: - MOVQ AX, R11 - JMP LBB12_48 +LBB11_13: + LEAQ 3(R12), BX + CMPQ BX, R13 + JAE LBB11_17 + MOVB 0(R15)(BX*1), AX + CMPB AX, $13 + JE LBB11_17 + CMPB AX, $32 + JE LBB11_17 + LEAL -9(AX), CX + CMPB CX, $1 + JA LBB11_48 -LBB12_19: - ADDQ $4, R11 - CMPQ R8, R11 - JBE LBB12_44 - LEAQ 0(R9)(R11*1), BX - MOVQ R8, CX - SUBQ R11, CX - JE LBB12_28 - MOVL BX, AX - ANDL $31, AX - TESTQ AX, AX - JE LBB12_28 - MOVL $5, DX - SUBQ R8, DX - MOVQ $4294977024, CX - -LBB12_23: - MOVBLSX 0(R9)(R11*1), AX - CMPL AX, $32 - JA LBB12_46 - BTQ AX, CX - JAE LBB12_46 - LEAQ 0(DX)(R11*1), BX - LEAQ 1(R11), AX - CMPQ BX, $4 - JE LBB12_27 - LEAL 0(R9)(R11*1), BX - ADDL $1, BX +LBB11_17: + LEAQ 4(R12), CX + CMPQ CX, R13 + JAE LBB11_43 + LEAQ 0(R15)(CX*1), BX + MOVQ R13, AX + SUBQ CX, AX + JE LBB11_26 + MOVL BX, CX + ANDL $31, CX + TESTQ CX, CX + JE LBB11_26 + LEAQ 0(R15)(R12*1), CX + MOVQ R13, AX + SUBQ R12, AX + LEAQ -5(AX), DX + XORL DI, DI + MOVQ $4294977024, R9 + +LBB11_21: + MOVBLSX 4(CX)(DI*1), SI + CMPL SI, $32 + JA LBB11_45 + BTQ SI, R9 + JAE LBB11_45 + LEAQ 1(DI), SI + CMPQ DX, 
DI + JE LBB11_25 + LEAQ 5(CX)(DI*1), BX ANDL $31, BX - MOVQ AX, R11 + MOVQ SI, DI TESTQ BX, BX - JNE LBB12_23 + JNE LBB11_21 -LBB12_27: - LEAQ 0(R9)(AX*1), BX - MOVQ R8, CX - SUBQ AX, CX +LBB11_25: + LEAQ 4(SI)(CX*1), BX + SUBQ SI, AX + ADDQ $-4, AX + +LBB11_26: + CMPQ AX, $32 + JB LBB11_31 + LEAQ -32(AX), CX + MOVQ CX, DX + ANDQ $-32, DX + LEAQ 32(DX)(BX*1), DX + ANDL $31, CX + QUAD $0xfffffdf4056ffec5 // vmovdqu $-524(%rip), %ymm0 /* LCPI11_0(%rip) */ + QUAD $0xfffffe0c0d6ffec5 // vmovdqu $-500(%rip), %ymm1 /* LCPI11_1(%rip) */ + QUAD $0xfffffe24156ffec5 // vmovdqu $-476(%rip), %ymm2 /* LCPI11_2(%rip) */ + QUAD $0xfffffe3c1d6ffec5 // vmovdqu $-452(%rip), %ymm3 /* LCPI11_3(%rip) */ -LBB12_28: - CMPQ CX, $32 - JB LBB12_32 - MOVQ R9, AX - SUBQ BX, AX - QUAD $0xfffffe26056ffec5 // vmovdqu $-474(%rip), %ymm0 /* LCPI12_0(%rip) */ - QUAD $0xfffffe3e0d6ffec5 // vmovdqu $-450(%rip), %ymm1 /* LCPI12_1(%rip) */ - QUAD $0xfffffe56156ffec5 // vmovdqu $-426(%rip), %ymm2 /* LCPI12_2(%rip) */ - QUAD $0xfffffe6e1d6ffec5 // vmovdqu $-402(%rip), %ymm3 /* LCPI12_3(%rip) */ - -LBB12_30: +LBB11_28: LONG $0x236ffdc5 // vmovdqa (%rbx), %ymm4 LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 @@ -5062,118 +4639,519 @@ LBB12_30: LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 - LONG $0xd4d7fdc5 // vpmovmskb %ymm4, %edx - CMPL DX, $-1 - JNE LBB12_45 + LONG $0xf4d7fdc5 // vpmovmskb %ymm4, %esi + CMPL SI, $-1 + JNE LBB11_44 ADDQ $32, BX - ADDQ $-32, CX ADDQ $-32, AX - CMPQ CX, $31 - JA LBB12_30 + CMPQ AX, $31 + JA LBB11_28 + MOVQ CX, AX + MOVQ DX, BX + +LBB11_31: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + CMPQ AX, $16 + JB LBB11_36 + LEAQ -16(AX), CX + MOVQ CX, DX + ANDQ $-16, DX + LEAQ 16(DX)(BX*1), DX + ANDL $15, CX + QUAD $0xfffffdf7056ffac5 // vmovdqu $-521(%rip), %xmm0 /* LCPI11_4(%rip) */ + QUAD $0xfffffdff0d6ffac5 // vmovdqu $-513(%rip), %xmm1 /* LCPI11_5(%rip) */ + QUAD $0xfffffe07156ffac5 // vmovdqu $-505(%rip), %xmm2 /* LCPI11_6(%rip) */ + QUAD $0xfffffe0f1d6ffac5 // vmovdqu $-497(%rip), %xmm3 /* LCPI11_7(%rip) */ + +LBB11_33: + LONG $0x236ff9c5 // vmovdqa (%rbx), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 + LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 + LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 + LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 + LONG $0xf4d7f9c5 // vpmovmskb %xmm4, %esi + CMPW SI, $-1 + JNE LBB11_96 + ADDQ $16, BX + ADDQ $-16, AX + CMPQ AX, $15 + JA LBB11_33 + MOVQ CX, AX + MOVQ DX, BX + +LBB11_36: + TESTQ AX, AX + JE LBB11_42 + LEAQ 0(BX)(AX*1), CX + INCQ BX + MOVQ $4294977024, DX + +LBB11_38: + MOVBLSX -1(BX), SI + CMPL SI, $32 + JA LBB11_101 + BTQ SI, DX + JAE LBB11_101 + DECQ AX + INCQ BX + TESTQ AX, AX + JNE LBB11_38 + MOVQ CX, BX + +LBB11_42: + SUBQ R15, BX + JMP LBB11_46 + +LBB11_43: + MOVQ CX, -48(BP) + MOVQ CX, R12 + JMP LBB11_52 + +LBB11_44: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + SUBQ R15, BX + NOTL SI + MOVLQSX SI, AX + BSFQ AX, AX + ADDQ AX, BX + JMP LBB11_46 + +LBB11_45: + ADDQ DI, CX + MOVQ R15, AX + NOTQ AX + LEAQ 5(AX)(CX*1), BX + +LBB11_46: + CMPQ BX, R13 + JAE LBB11_52 + MOVB 0(R15)(BX*1), AX + +LBB11_48: + LEAQ 1(BX), R12 + MOVQ R12, -48(BP) + MOVBLSX AX, CX + CMPL CX, $125 + JA LBB11_61 + LEAQ 0(R15)(BX*1), R14 + LONG $0xf8158d48; WORD $0x0002; BYTE $0x00 // leaq 
$760(%rip), %rdx /* LJTI11_0(%rip) */ + MOVLQSX 0(DX)(CX*4), CX + ADDQ DX, CX + JMP CX + +LBB11_50: + MOVQ BX, -48(BP) + TESTB $2, R8 + JNE LBB11_55 + LEAQ -80(BP), DI + LEAQ -48(BP), SI + MOVQ R10, DX + LONG $0x00083ae8; BYTE $0x00 // callq _vnumber + MOVQ -48(BP), BX + JMP LBB11_54 + +LBB11_52: + MOVQ $1, 0(R10) + +LBB11_53: + MOVQ R12, BX + +LBB11_54: + MOVQ BX, AX + ADDQ $40, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET + +LBB11_55: + XORL CX, CX + CMPB AX, $45 + SETEQ CX + ADDQ CX, R14 + SUBQ CX, R13 + JE LBB11_97 + MOVQ R10, -56(BP) + CMPQ BX, R13 + JAE LBB11_58 + MOVB 0(R14), AX + ADDB $-48, AX + CMPB AX, $10 + JAE LBB11_99 + +LBB11_58: + MOVQ R14, DI + MOVQ R13, SI + LONG $0x002381e8; BYTE $0x00 // callq _do_skip_number + TESTQ AX, AX + JS LBB11_98 + ADDQ AX, R14 + SUBQ R15, R14 + MOVQ R14, -48(BP) + TESTQ R12, R12 + MOVQ -56(BP), R10 + JLE LBB11_100 + MOVQ $8, 0(R10) + MOVQ BX, 24(R10) + MOVQ R14, BX + JMP LBB11_54 + +LBB11_61: + MOVQ $-2, 0(R10) + JMP LBB11_54 + +LBB11_62: + MOVQ R10, R14 + MOVQ $-1, -64(BP) + LEAQ -80(BP), DI + LEAQ -64(BP), DX + MOVQ R12, SI + LONG $0x0004d2e8; BYTE $0x00 // callq _advance_string + MOVQ AX, BX + TESTQ AX, AX + JS LBB11_81 + MOVQ BX, -48(BP) + MOVQ R12, 16(R14) + MOVQ -64(BP), AX + CMPQ AX, BX + MOVQ $-1, CX + LONG $0xc84c0f48 // cmovlq %rax, %rcx + MOVQ CX, 24(R14) + MOVL $7, AX + MOVQ AX, 0(R14) + JMP LBB11_54 + +LBB11_64: + XORL AX, AX + TESTL R8, R8 + SETPL AX + MOVL $11, CX + JMP LBB11_80 + +LBB11_65: + XORL AX, AX + TESTL R8, R8 + SETPL AX + MOVL $10, CX + JMP LBB11_80 + +LBB11_66: + MOVQ $5, 0(R10) + JMP LBB11_53 + +LBB11_67: + XORL AX, AX + TESTL R8, R8 + SETPL AX + MOVL $12, CX + JMP LBB11_80 + +LBB11_68: + LEAQ -4(R13), AX + CMPQ BX, AX + JAE LBB11_77 + MOVL 0(R15)(R12*1), CX + CMPL CX, $1702063201 + JNE LBB11_82 + ADDQ $5, BX + MOVQ BX, -48(BP) + MOVL $4, AX + JMP LBB11_94 + +LBB11_71: + LEAQ -3(R13), AX + CMPQ BX, AX + JAE LBB11_77 + MOVL 0(R14), CX + CMPL CX, $1819047278 + JNE LBB11_87 + ADDQ $4, BX + MOVQ BX, -48(BP) + MOVL $2, AX + JMP LBB11_94 + +LBB11_74: + LEAQ -3(R13), AX + CMPQ BX, AX + JAE LBB11_77 + MOVL 0(R14), CX + CMPL CX, $1702195828 + JNE LBB11_90 + ADDQ $4, BX + MOVQ BX, -48(BP) + MOVL $3, AX + JMP LBB11_94 -LBB12_32: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ CX, $16 - JB LBB12_36 - MOVQ R9, R10 - SUBQ BX, R10 - QUAD $0xfffffe38056ffac5 // vmovdqu $-456(%rip), %xmm0 /* LCPI12_4(%rip) */ - QUAD $0xfffffe400d6ffac5 // vmovdqu $-448(%rip), %xmm1 /* LCPI12_5(%rip) */ - QUAD $0xfffffe48156ffac5 // vmovdqu $-440(%rip), %xmm2 /* LCPI12_6(%rip) */ - QUAD $0xfffffe501d6ffac5 // vmovdqu $-432(%rip), %xmm3 /* LCPI12_7(%rip) */ +LBB11_77: + MOVQ R13, -48(BP) + MOVQ $-1, AX + JMP LBB11_95 -LBB12_34: - LONG $0x236ff9c5 // vmovdqa (%rbx), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 - LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 - LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 - LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 - LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax - CMPW AX, $-1 - JNE LBB12_49 - ADDQ $16, BX - ADDQ $-16, CX - ADDQ $-16, R10 - CMPQ CX, $15 - JA LBB12_34 +LBB11_78: + MOVQ $6, 0(R10) + JMP LBB11_53 -LBB12_36: - TESTQ CX, CX - JE LBB12_42 - LEAQ 0(BX)(CX*1), R10 +LBB11_79: XORL AX, AX - MOVQ $4294977024, R11 - -LBB12_38: - 
MOVBLSX 0(BX)(AX*1), DX - CMPL DX, $32 - JA LBB12_51 - BTQ DX, R11 - JAE LBB12_51 - ADDQ $1, AX - CMPQ CX, AX - JNE LBB12_38 - MOVQ R10, BX + TESTL R8, R8 + SETPL AX + MOVL $13, CX -LBB12_42: - SUBQ R9, BX - MOVQ BX, R11 - CMPQ R11, R8 - JB LBB12_47 - JMP LBB12_50 +LBB11_80: + MOVQ $-2, DX + LONG $0xd1480f48 // cmovsq %rcx, %rdx + MOVQ DX, 0(R10) + SUBQ AX, R12 + JMP LBB11_53 -LBB12_44: - MOVQ R11, 0(SI) - JMP LBB12_50 +LBB11_81: + MOVQ R13, -48(BP) + MOVQ BX, 0(R14) + MOVQ R13, BX + JMP LBB11_54 -LBB12_45: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - NOTL DX - MOVLQSX DX, CX - BSFQ CX, R11 - SUBQ AX, R11 +LBB11_82: + MOVQ $-2, AX + CMPB CX, $97 + JNE LBB11_86 + MOVL $1702063201, CX + +LBB11_84: + SHRL $8, CX + MOVBLSX 1(R15)(R12*1), DX + INCQ R12 + MOVBLZX CX, SI + CMPL SI, DX + JE LBB11_84 + MOVQ R12, -48(BP) + +LBB11_86: + MOVQ R12, R13 + JMP LBB11_95 + +LBB11_87: + MOVQ BX, -48(BP) + MOVQ $-2, AX + CMPB CX, $110 + JNE LBB11_94 + MOVL $1819047278, CX -LBB12_46: - CMPQ R11, R8 - JAE LBB12_50 +LBB11_89: + SHRL $8, CX + MOVBLSX 1(R15)(BX*1), DX + INCQ BX + MOVBLZX CX, SI + CMPL SI, DX + JE LBB11_89 + JMP LBB11_93 + +LBB11_90: + MOVQ BX, -48(BP) + MOVQ $-2, AX + CMPB CX, $116 + JNE LBB11_94 + MOVL $1702195828, CX -LBB12_47: - MOVQ 0(DI), R9 +LBB11_92: + SHRL $8, CX + MOVBLSX 1(R15)(BX*1), DX + INCQ BX + MOVBLZX CX, SI + CMPL SI, DX + JE LBB11_92 -LBB12_48: - LEAQ 1(R11), AX - MOVQ AX, 0(SI) - MOVB 0(R9)(R11*1), AX - MOVBLSX AX, AX - BYTE $0x5b // popq %rbx - BYTE $0x5d // popq %rbp - RET +LBB11_93: + MOVQ BX, -48(BP) -LBB12_49: - MOVWLZX AX, AX +LBB11_94: + MOVQ BX, R13 + +LBB11_95: + MOVQ AX, 0(R10) + MOVQ R13, BX + JMP LBB11_54 + +LBB11_96: + MOVWLZX SI, AX + SUBQ R15, BX NOTL AX - BSFL AX, R11 - SUBQ R10, R11 - CMPQ R11, R8 - JB LBB12_47 + BSFL AX, AX + ADDQ AX, BX + JMP LBB11_46 -LBB12_50: - XORL AX, AX - MOVBLSX AX, AX - BYTE $0x5b // popq %rbx - BYTE $0x5d // popq %rbp - RET +LBB11_97: + SUBQ R15, R14 + MOVQ R14, -48(BP) + MOVQ $-1, BX + JMP LBB11_100 + +LBB11_98: + NOTQ AX + ADDQ AX, R14 + +LBB11_99: + SUBQ R15, R14 + MOVQ R14, -48(BP) + MOVQ $-2, BX + MOVQ -56(BP), R10 + +LBB11_100: + MOVQ BX, 0(R10) + MOVQ R14, BX + JMP LBB11_54 -LBB12_51: - SUBQ R9, BX +LBB11_101: + MOVQ R15, AX + NOTQ AX ADDQ AX, BX - MOVQ BX, R11 - CMPQ R11, R8 - JB LBB12_47 - JMP LBB12_50 + JMP LBB11_46 + +// .set L11_0_set_52, LBB11_52-LJTI11_0 +// .set L11_0_set_61, LBB11_61-LJTI11_0 +// .set L11_0_set_62, LBB11_62-LJTI11_0 +// .set L11_0_set_64, LBB11_64-LJTI11_0 +// .set L11_0_set_50, LBB11_50-LJTI11_0 +// .set L11_0_set_65, LBB11_65-LJTI11_0 +// .set L11_0_set_66, LBB11_66-LJTI11_0 +// .set L11_0_set_67, LBB11_67-LJTI11_0 +// .set L11_0_set_68, LBB11_68-LJTI11_0 +// .set L11_0_set_71, LBB11_71-LJTI11_0 +// .set L11_0_set_74, LBB11_74-LJTI11_0 +// .set L11_0_set_78, LBB11_78-LJTI11_0 +// .set L11_0_set_79, LBB11_79-LJTI11_0 +LJTI11_0: + LONG $0xfffffd31 // .long L11_0_set_52 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long 
L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdba // .long L11_0_set_62 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffe0c // .long L11_0_set_64 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffd11 // .long L11_0_set_50 + LONG $0xfffffe1e // .long L11_0_set_65 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffe30 // .long L11_0_set_66 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffe3c // .long L11_0_set_67 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffe4e // .long L11_0_set_68 + LONG 
$0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffe79 // .long L11_0_set_71 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffea3 // .long L11_0_set_74 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffedd // .long L11_0_set_78 + LONG $0xfffffdb1 // .long L11_0_set_61 + LONG $0xfffffee9 // .long L11_0_set_79 _vstring: BYTE $0x55 // pushq %rbp @@ -5192,7 +5170,7 @@ _vstring: MOVQ R12, SI LONG $0x000080e8; BYTE $0x00 // callq _advance_string TESTQ AX, AX - JS LBB13_1 + JS LBB12_1 MOVQ AX, 0(BX) MOVQ R12, 16(R14) MOVQ -40(BP), CX @@ -5201,13 +5179,13 @@ _vstring: LONG $0xc14c0f48 // cmovlq %rcx, %rax MOVQ AX, 24(R14) MOVL $7, AX - JMP LBB13_3 + JMP LBB12_3 -LBB13_1: +LBB12_1: MOVQ 8(R15), CX MOVQ CX, 0(BX) -LBB13_3: +LBB12_3: MOVQ AX, 0(R14) ADDQ $16, SP BYTE $0x5b // popq %rbx @@ -5217,11 +5195,11 @@ LBB13_3: BYTE $0x5d // popq %rbp RET -LCPI14_0: +LCPI13_0: QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' -LCPI14_1: +LCPI13_1: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' @@ -5233,90 +5211,98 @@ _advance_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - MOVQ 8(DI), R15 - SUBQ SI, R15 - JE LBB14_17 - MOVQ 0(DI), R9 + MOVQ 8(DI), R12 + SUBQ SI, R12 + JE LBB13_18 + MOVQ 0(DI), R8 + ADDQ R8, SI MOVQ $-1, 0(DX) - CMPQ R15, $64 - JB LBB14_18 - MOVQ SI, DI - NOTQ DI - MOVQ $-1, R8 - XORL R14, R14 - QUAD $0xffffff7a056ffec5 // vmovdqu $-134(%rip), %ymm0 /* LCPI14_0(%rip) */ - QUAD $0xffffff920d6ffec5 // vmovdqu $-110(%rip), %ymm1 /* LCPI14_1(%rip) */ + CMPQ R12, $64 + JB LBB13_19 + MOVL R12, R9 + ANDL $63, R9 + MOVQ $-1, R14 + XORL R15, R15 + QUAD $0xffffff76056ffec5 // vmovdqu $-138(%rip), %ymm0 /* LCPI13_0(%rip) */ + QUAD $0xffffff8e0d6ffec5 // vmovdqu $-114(%rip), %ymm1 /* LCPI13_1(%rip) */ MOVQ $-6148914691236517206, R10 MOVQ $6148914691236517205, R11 -LBB14_3: - LONG $0x6f7ec1c4; WORD $0x3114 // vmovdqu (%r9,%rsi), %ymm2 - LONG $0x6f7ec1c4; WORD $0x315c; BYTE $0x20 // vmovdqu $32(%r9,%rsi), %ymm3 - LONG $0xe074e5c5 // vpcmpeqb %ymm0, %ymm3, %ymm4 - LONG $0xdcd7fdc5 // vpmovmskb %ymm4, %ebx - LONG $0xe174edc5 // vpcmpeqb %ymm1, %ymm2, %ymm4 - LONG $0xc4d7fdc5 // vpmovmskb %ymm4, %eax - LONG $0xd974e5c5 // vpcmpeqb %ymm1, %ymm3, %ymm3 - LONG $0xcbd7fdc5 // vpmovmskb %ymm3, %ecx +LBB13_3: + LONG $0x166ffec5 // vmovdqu (%rsi), %ymm2 + LONG $0x5e6ffec5; BYTE $0x20 // vmovdqu $32(%rsi), %ymm3 + LONG $0xe074edc5 // vpcmpeqb %ymm0, %ymm2, %ymm4 + LONG $0xc4d7fdc5 // vpmovmskb %ymm4, %eax + LONG $0xe074e5c5 // vpcmpeqb %ymm0, %ymm3, %ymm4 + LONG $0xdcd7fdc5 // vpmovmskb %ymm4, %ebx + LONG $0xd174edc5 // vpcmpeqb %ymm1, %ymm2, %ymm2 + LONG $0xfad7fdc5 // vpmovmskb %ymm2, %edi + LONG 
$0xd174e5c5 // vpcmpeqb %ymm1, %ymm3, %ymm2 + LONG $0xcad7fdc5 // vpmovmskb %ymm2, %ecx SHLQ $32, BX SHLQ $32, CX - ORQ CX, AX - JE LBB14_5 - CMPQ R8, $-1 - JE LBB14_9 + ORQ CX, DI + JE LBB13_5 + CMPQ R14, $-1 + JE LBB13_8 -LBB14_5: - LONG $0xd074edc5 // vpcmpeqb %ymm0, %ymm2, %ymm2 - LONG $0xcad7fdc5 // vpmovmskb %ymm2, %ecx - ORQ CX, BX - MOVQ AX, CX - ORQ R14, CX - JNE LBB14_8 +LBB13_5: + ORQ AX, BX + MOVQ DI, AX + ORQ R15, AX + JNE LBB13_9 + +LBB13_6: TESTQ BX, BX - JNE LBB14_10 + JNE LBB13_10 -LBB14_7: - ADDQ $-64, R15 - ADDQ $-64, DI +LBB13_7: ADDQ $64, SI - CMPQ R15, $63 - JA LBB14_3 - JMP LBB14_12 + ADDQ $-64, R12 + CMPQ R12, $63 + JA LBB13_3 + JMP LBB13_13 -LBB14_8: - MOVQ R14, R13 - NOTQ R13 - ANDQ AX, R13 - MOVQ R13, R12 - ADDQ R13, R12 - ORQ R14, R12 - MOVQ R12, CX +LBB13_8: + MOVQ SI, CX + SUBQ R8, CX + BSFQ DI, R14 + ADDQ CX, R14 + MOVQ R14, 0(DX) + ORQ AX, BX + MOVQ DI, AX + ORQ R15, AX + JE LBB13_6 + +LBB13_9: + MOVQ R15, AX + NOTQ AX + ANDQ DI, AX + LEAQ 0(AX)(AX*1), R13 + ORQ R15, R13 + MOVQ R13, CX NOTQ CX - ANDQ AX, CX + ANDQ DI, CX ANDQ R10, CX - XORL R14, R14 - ADDQ R13, CX - SETCS R14 + XORL R15, R15 + ADDQ AX, CX + SETCS R15 ADDQ CX, CX XORQ R11, CX - ANDQ R12, CX + ANDQ R13, CX NOTQ CX ANDQ CX, BX TESTQ BX, BX - JE LBB14_7 - JMP LBB14_10 - -LBB14_9: - BSFQ AX, R8 - ADDQ SI, R8 - MOVQ R8, 0(DX) - JMP LBB14_5 + JE LBB13_7 -LBB14_10: +LBB13_10: BSFQ BX, AX - SUBQ DI, AX -LBB14_11: +LBB13_11: + SUBQ R8, SI + LEAQ 1(SI)(AX*1), AX + +LBB13_12: BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -5326,173 +5312,154 @@ LBB14_11: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB14_12: - ADDQ R9, SI - CMPQ R15, $32 - JB LBB14_23 +LBB13_13: + MOVQ R9, R12 + CMPQ R12, $32 + JB LBB13_24 -LBB14_13: +LBB13_14: LONG $0x066ffec5 // vmovdqu (%rsi), %ymm0 - QUAD $0xfffffe820d74fdc5 // vpcmpeqb $-382(%rip), %ymm0, %ymm1 /* LCPI14_0(%rip) */ - LONG $0xf9d7fdc5 // vpmovmskb %ymm1, %edi - QUAD $0xfffffe960574fdc5 // vpcmpeqb $-362(%rip), %ymm0, %ymm0 /* LCPI14_1(%rip) */ - LONG $0xc0d7fdc5 // vpmovmskb %ymm0, %eax - TESTL AX, AX - JNE LBB14_19 - TESTQ R14, R14 - JNE LBB14_21 - XORL R14, R14 - TESTQ DI, DI - JE LBB14_22 + QUAD $0xfffffe760d74fdc5 // vpcmpeqb $-394(%rip), %ymm0, %ymm1 /* LCPI13_0(%rip) */ + LONG $0xc9d77dc5 // vpmovmskb %ymm1, %r9d + QUAD $0xfffffe8a0574fdc5 // vpcmpeqb $-374(%rip), %ymm0, %ymm0 /* LCPI13_1(%rip) */ + LONG $0xf8d7fdc5 // vpmovmskb %ymm0, %edi + TESTL DI, DI + JNE LBB13_20 + TESTQ R15, R15 + JNE LBB13_22 + XORL R15, R15 + TESTQ R9, R9 + JE LBB13_23 -LBB14_16: - BSFQ DI, AX - SUBQ R9, SI - ADDQ SI, AX - ADDQ $1, AX - JMP LBB14_11 +LBB13_17: + BSFQ R9, AX + JMP LBB13_11 -LBB14_18: - ADDQ R9, SI - MOVQ $-1, R8 - XORL R14, R14 - CMPQ R15, $32 - JAE LBB14_13 - JMP LBB14_23 +LBB13_19: + MOVQ $-1, R14 + XORL R15, R15 + CMPQ R12, $32 + JAE LBB13_14 + JMP LBB13_24 -LBB14_19: - CMPQ R8, $-1 - JNE LBB14_21 - MOVQ SI, CX - SUBQ R9, CX - BSFQ AX, R8 - ADDQ CX, R8 - MOVQ R8, 0(DX) +LBB13_20: + CMPQ R14, $-1 + JNE LBB13_22 + MOVQ SI, AX + SUBQ R8, AX + BSFQ DI, R14 + ADDQ AX, R14 + MOVQ R14, 0(DX) -LBB14_21: - MOVL R14, CX +LBB13_22: + MOVL R15, AX + NOTL AX + ANDL DI, AX + LEAL 0(AX)(AX*1), BX + ORL R15, BX + MOVL BX, CX NOTL CX - ANDL AX, CX - LEAL 0(CX)(CX*1), R10 - ORL R14, R10 - MOVL R10, BX - NOTL BX - ANDL AX, BX - ANDL $-1431655766, BX - XORL R14, R14 - ADDL CX, BX - SETCS R14 - ADDL BX, BX - XORL $1431655765, BX - ANDL R10, BX - NOTL BX - ANDL BX, DI - TESTQ DI, DI - JNE LBB14_16 + ANDL DI, CX + ANDL $-1431655766, CX + XORL R15, R15 + 
ADDL AX, CX + SETCS R15 + ADDL CX, CX + XORL $1431655765, CX + ANDL BX, CX + NOTL CX + ANDL CX, R9 + TESTQ R9, R9 + JNE LBB13_17 -LBB14_22: +LBB13_23: ADDQ $32, SI - ADDQ $-32, R15 + ADDQ $-32, R12 -LBB14_23: - TESTQ R14, R14 - JNE LBB14_38 +LBB13_24: TESTQ R15, R15 - JE LBB14_35 - -LBB14_25: - MOVQ R9, R10 - NEGQ R10 - MOVQ $-1, AX + JNE LBB13_35 + MOVQ $-1, AX + TESTQ R12, R12 + JE LBB13_12 -LBB14_26: - XORL DI, DI +LBB13_26: + MOVQ R8, R9 + NOTQ R9 -LBB14_27: - MOVBLZX 0(SI)(DI*1), BX +LBB13_27: + LEAQ 1(SI), DI + MOVBLZX 0(SI), BX CMPB BX, $34 - JE LBB14_34 + JE LBB13_34 + LEAQ -1(R12), R10 CMPB BX, $92 - JE LBB14_30 - ADDQ $1, DI - CMPQ R15, DI - JNE LBB14_27 - JMP LBB14_36 - -LBB14_30: - LEAQ -1(R15), CX - CMPQ CX, DI - JE LBB14_11 - CMPQ R8, $-1 - JNE LBB14_33 - LEAQ 0(R10)(SI*1), R8 - ADDQ DI, R8 - MOVQ R8, 0(DX) - -LBB14_33: - ADDQ DI, SI - ADDQ $2, SI - MOVQ R15, CX - SUBQ DI, CX - ADDQ $-2, CX - ADDQ $-2, R15 - CMPQ R15, DI - MOVQ CX, R15 - JNE LBB14_26 - JMP LBB14_11 - -LBB14_34: - ADDQ DI, SI - ADDQ $1, SI + JE LBB13_30 + MOVQ R10, R12 + MOVQ DI, SI + TESTQ R10, R10 + JNE LBB13_27 + JMP LBB13_12 -LBB14_35: - SUBQ R9, SI - MOVQ SI, AX - JMP LBB14_11 +LBB13_30: + TESTQ R10, R10 + JE LBB13_12 + CMPQ R14, $-1 + JNE LBB13_33 + ADDQ R9, DI + MOVQ DI, 0(DX) + MOVQ DI, R14 -LBB14_36: - MOVQ $-1, AX - CMPB BX, $34 - JNE LBB14_11 - ADDQ DI, SI - JMP LBB14_35 +LBB13_33: + ADDQ $2, SI + ADDQ $-2, R12 + MOVQ R12, R10 + TESTQ R10, R10 + JNE LBB13_27 + JMP LBB13_12 -LBB14_38: - TESTQ R15, R15 - JE LBB14_17 - CMPQ R8, $-1 - JNE LBB14_41 - MOVQ R9, R8 - NOTQ R8 - ADDQ SI, R8 - MOVQ R8, 0(DX) +LBB13_34: + SUBQ R8, DI + MOVQ DI, AX + JMP LBB13_12 -LBB14_41: - ADDQ $1, SI - ADDQ $-1, R15 - TESTQ R15, R15 - JNE LBB14_25 - JMP LBB14_35 +LBB13_35: + TESTQ R12, R12 + JE LBB13_18 + CMPQ R14, $-1 + JNE LBB13_38 + MOVQ R8, R14 + NOTQ R14 + ADDQ SI, R14 + MOVQ R14, 0(DX) + +LBB13_38: + INCQ SI + DECQ R12 + MOVQ $-1, AX + TESTQ R12, R12 + JNE LBB13_26 + JMP LBB13_12 -LBB14_17: +LBB13_18: MOVQ $-1, AX - JMP LBB14_11 + JMP LBB13_12 -LCPI15_0: +LCPI14_0: LONG $0x43300000 // .long 1127219200 LONG $0x45300000 // .long 1160773632 LONG $0x00000000 // .long 0 LONG $0x00000000 // .long 0 -LCPI15_1: - QUAD $0x4330000000000000 // .quad 0x4330000000000000 - QUAD $0x4530000000000000 // .quad 0x4530000000000000 +LCPI14_1: + QUAD $0x4330000000000000 // .quad 4841369599423283200 + QUAD $0x4530000000000000 // .quad 4985484787499139072 -LCPI15_2: - QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 +LCPI14_2: + QUAD $0x430c6bf526340000 // .quad 4831355200913801216 -LCPI15_3: - QUAD $0xc30c6bf526340000 // .quad 0xc30c6bf526340000 +LCPI14_3: + QUAD $0xc30c6bf526340000 // .quad -4392016835940974592 _vnumber: BYTE $0x55 // pushq %rbp @@ -5502,594 +5469,560 @@ _vnumber: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $72, SP - MOVQ DX, R14 - MOVQ $0, -64(BP) + SUBQ $56, SP + MOVQ DX, BX + MOVQ SI, R14 + MOVQ $0, -48(BP) MOVQ 0(SI), AX MOVQ 0(DI), R15 MOVQ 8(DI), R13 - MOVQ 32(DX), R11 - MOVQ 40(DX), BX + MOVQ 32(DX), R10 + MOVQ 40(DX), R11 MOVQ $9, 0(DX) LONG $0xc057f9c5 // vxorpd %xmm0, %xmm0, %xmm0 LONG $0x4211f9c5; BYTE $0x08 // vmovupd %xmm0, $8(%rdx) MOVQ 0(SI), CX MOVQ CX, 24(DX) CMPQ AX, R13 - JAE LBB15_6 + JAE LBB14_52 MOVB 0(R15)(AX*1), DI - MOVL $1, R10 + MOVL $1, R9 CMPB DI, $45 - JNE LBB15_4 - ADDQ $1, AX + JNE LBB14_4 + INCQ AX CMPQ AX, R13 - JAE LBB15_6 + JAE LBB14_52 MOVB 0(R15)(AX*1), DI - MOVL $-1, R10 + MOVL $-1, R9 -LBB15_4: +LBB14_4: LEAL -48(DI), CX CMPB CX, $10 - JB 
LBB15_9 - MOVQ AX, 0(SI) - MOVQ $-2, 0(R14) - JMP LBB15_8 - -LBB15_6: - MOVQ R13, 0(SI) - -LBB15_7: - MOVQ $-1, 0(R14) + JB LBB14_6 -LBB15_8: - ADDQ $72, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - RET +LBB14_5: + MOVQ AX, 0(R14) + MOVQ $-2, 0(BX) + JMP LBB14_53 -LBB15_9: +LBB14_6: CMPB DI, $48 - JNE LBB15_13 - LEAQ 1(AX), R9 + JNE LBB14_10 + LEAQ 1(AX), SI CMPQ AX, R13 - JAE LBB15_22 - MOVB 0(R15)(R9*1), DX + JAE LBB14_19 + MOVB 0(R15)(SI*1), DX ADDB $-46, DX CMPB DX, $55 - JA LBB15_22 + JA LBB14_19 MOVBLZX DX, R8 MOVQ $36028797027352577, DX BTQ R8, DX - JAE LBB15_22 + JAE LBB14_19 -LBB15_13: +LBB14_10: CMPQ AX, R13 - MOVQ R11, -104(BP) - MOVQ BX, -96(BP) - MOVQ SI, -56(BP) - JAE LBB15_21 + JAE LBB14_18 CMPB CX, $9 - MOVL R10, -68(BP) - MOVB $1, CX - JA LBB15_23 - MOVL CX, -44(BP) - MOVL $4294967248, R9 - LEAQ -1(R13), R10 + JA LBB14_20 + LEAQ -1(R13), R8 XORL CX, CX - XORL R8, R8 + XORL SI, SI XORL R12, R12 -LBB15_16: - CMPL R8, $18 - JG LBB15_18 +LBB14_13: + CMPL SI, $18 + JG LBB14_15 + MOVBQSX DI, DI LEAQ 0(R12)(R12*4), DX - MOVBLSX DI, DI - ADDL R9, DI - LEAQ 0(DI)(DX*2), R12 - ADDL $1, R8 - JMP LBB15_19 + LEAQ -48(DI)(DX*2), R12 + INCL SI + JMP LBB14_16 -LBB15_18: - ADDL $1, CX +LBB14_15: + INCL CX -LBB15_19: - CMPQ R10, AX - JE LBB15_28 +LBB14_16: + CMPQ R8, AX + JE LBB14_24 MOVBLZX 1(R15)(AX*1), DI - ADDQ $1, AX + INCQ AX LEAL -48(DI), DX CMPB DX, $10 - JB LBB15_16 - JMP LBB15_24 + JB LBB14_13 + JMP LBB14_21 -LBB15_21: - MOVB $1, CX - MOVL CX, -44(BP) - XORL CX, CX - XORL R8, R8 - XORL R12, R12 - XORL R11, R11 - TESTL CX, CX - SETGT R11 - JE LBB15_30 - JMP LBB15_38 +LBB14_18: + XORL CX, CX + XORL SI, SI + XORL R12, R12 + JMP LBB14_25 -LBB15_22: - MOVQ R9, 0(SI) - JMP LBB15_8 +LBB14_19: + MOVQ SI, 0(R14) + JMP LBB14_53 -LBB15_23: - MOVL CX, -44(BP) +LBB14_20: XORL R12, R12 - XORL R8, R8 + XORL SI, SI XORL CX, CX -LBB15_24: - CMPB DI, $46 - JNE LBB15_29 - ADDQ $1, AX - MOVQ $8, 0(R14) - CMPQ AX, R13 - JAE LBB15_67 - MOVB 0(R15)(AX*1), DX - ADDB $-48, DX - CMPB DX, $10 - JAE LBB15_89 - MOVL $0, -44(BP) - JMP LBB15_29 - -LBB15_28: +LBB14_21: + XORL DX, DX + TESTL CX, CX + SETGT DX + MOVL DX, -52(BP) + MOVL $9, R8 + CMPB DI, $46 + JNE LBB14_26 + INCQ AX + MOVQ $8, 0(BX) + CMPQ AX, R13 + JAE LBB14_52 + MOVB 0(R15)(AX*1), DX + ADDB $-48, DX + MOVL $8, R8 + CMPB DX, $10 + JAE LBB14_5 + JMP LBB14_26 + +LBB14_24: MOVQ R13, AX -LBB15_29: - MOVL -68(BP), R10 - XORL R11, R11 +LBB14_25: + XORL DX, DX TESTL CX, CX - SETGT R11 - JNE LBB15_38 + SETGT DX + MOVL DX, -52(BP) + MOVL $9, R8 -LBB15_30: +LBB14_26: + TESTL CX, CX + JNE LBB14_35 TESTQ R12, R12 - JNE LBB15_38 + JNE LBB14_35 CMPQ AX, R13 - JAE LBB15_36 - MOVL AX, SI - SUBL R13, SI - XORL R8, R8 + JAE LBB14_33 + MOVL AX, DI + SUBL R13, DI + XORL SI, SI XORL CX, CX -LBB15_33: +LBB14_30: CMPB 0(R15)(AX*1), $48 - JNE LBB15_37 - ADDQ $1, AX - ADDL $-1, CX + JNE LBB14_34 + INCQ AX + DECL CX CMPQ R13, AX - JNE LBB15_33 + JNE LBB14_30 XORL R12, R12 - CMPB -44(BP), $0 - JNE LBB15_57 - JMP LBB15_61 + CMPL R8, $9 + JE LBB14_55 + JMP LBB14_59 -LBB15_36: +LBB14_33: XORL CX, CX - XORL R8, R8 + XORL SI, SI -LBB15_37: +LBB14_34: XORL R12, R12 -LBB15_38: +LBB14_35: CMPQ AX, R13 - JAE LBB15_44 - CMPL R8, $18 - JG LBB15_44 - MOVL $4294967248, R9 + JAE LBB14_40 + CMPL SI, $18 + JG LBB14_40 -LBB15_41: +LBB14_37: MOVBLZX 0(R15)(AX*1), DI LEAL -48(DI), DX CMPB DX, $9 - JA LBB15_44 + JA LBB14_40 LEAQ 0(R12)(R12*4), DX - ADDL R9, DI - LEAQ 
0(DI)(DX*2), R12 - ADDL $-1, CX - ADDQ $1, AX + LEAQ -48(DI)(DX*2), R12 + DECL CX + INCQ AX CMPQ AX, R13 - JAE LBB15_44 - LEAL 1(R8), DX - CMPL R8, $18 - MOVL DX, R8 - JL LBB15_41 + JAE LBB14_40 + LEAL 1(SI), DX + CMPL SI, $18 + MOVL DX, SI + JL LBB14_37 -LBB15_44: +LBB14_40: CMPQ AX, R13 - JAE LBB15_56 - MOVB 0(R15)(AX*1), DX - LEAL -48(DX), SI - CMPB SI, $9 - JA LBB15_50 - LEAQ -1(R13), SI - -LBB15_47: - CMPQ SI, AX - JE LBB15_68 - MOVBLZX 1(R15)(AX*1), DX - ADDQ $1, AX - LEAL -48(DX), DI - CMPB DI, $9 - JBE LBB15_47 - MOVL $1, R11 - -LBB15_50: - ORB $32, DX - CMPB DX, $101 - JNE LBB15_56 - LEAQ 1(AX), DX - MOVQ $8, 0(R14) - CMPQ DX, R13 - JAE LBB15_67 - MOVL R11, -44(BP) - MOVB 0(R15)(DX*1), SI + JAE LBB14_54 + MOVB 0(R15)(AX*1), SI + LEAL -48(SI), DX + CMPB DX, $9 + JA LBB14_46 + LEAQ -1(R13), DI + +LBB14_43: + CMPQ DI, AX + JE LBB14_76 + MOVBLZX 1(R15)(AX*1), SI + INCQ AX + LEAL -48(SI), DX + CMPB DX, $9 + JBE LBB14_43 + MOVL $1, -52(BP) + +LBB14_46: + ORB $32, SI + CMPB SI, $101 + JNE LBB14_54 + LEAQ 1(AX), DI + MOVQ $8, 0(BX) + CMPQ DI, R13 + JAE LBB14_52 + MOVB 0(R15)(DI*1), SI CMPB SI, $45 - JE LBB15_54 + JE LBB14_50 MOVL $1, R8 CMPB SI, $43 - JNE LBB15_87 + JNE LBB14_85 -LBB15_54: +LBB14_50: ADDQ $2, AX CMPQ AX, R13 - JAE LBB15_67 + JAE LBB14_52 XORL DX, DX CMPB SI, $43 SETEQ DX - LEAL 0(DX)(DX*1), R8 - ADDL $-1, R8 + LEAL -1(DX)(DX*1), R8 MOVB 0(R15)(AX*1), SI - JMP LBB15_88 + JMP LBB14_86 -LBB15_56: - MOVL CX, SI - MOVQ AX, R13 - CMPB -44(BP), $0 - JE LBB15_61 +LBB14_52: + MOVQ R13, 0(R14) + MOVQ $-1, 0(BX) -LBB15_57: - TESTL SI, SI - JNE LBB15_60 - MOVQ $-9223372036854775808, AX - MOVLQSX R10, CX - TESTQ R12, R12 - JNS LBB15_71 - MOVQ R12, DX - ANDQ CX, DX - CMPQ DX, AX - JE LBB15_71 - -LBB15_60: - MOVQ $8, 0(R14) - -LBB15_61: - MOVQ $0, -80(BP) - LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 - QUAD $0xfffffcae0562f9c5 // vpunpckldq $-850(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ - QUAD $0xfffffcb6055cf9c5 // vsubpd $-842(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ - LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 - LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - MOVQ R12, AX - SHRQ $52, AX - JNE LBB15_76 - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - MOVL R10, AX - SHRL $31, AX - SHLQ $63, AX - ORQ CX, AX - MOVQ AX, -64(BP) - TESTQ R12, R12 - JE LBB15_83 - TESTL SI, SI - JE LBB15_83 - LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 - LEAL -1(SI), AX - CMPL AX, $36 - JA LBB15_69 - CMPL SI, $23 - JL LBB15_72 - LEAL -22(SI), AX - LONG $0xbc0d8d48; WORD $0x00c0; BYTE $0x00 // leaq $49340(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - MOVL $22, AX - JMP LBB15_73 - -LBB15_67: - MOVQ -56(BP), AX - MOVQ R13, 0(AX) - JMP LBB15_7 - -LBB15_68: - MOVL $1, R11 - MOVL CX, SI - CMPB -44(BP), $0 - JNE LBB15_57 - JMP LBB15_61 - -LBB15_69: - CMPL SI, $-22 - JB LBB15_76 - NEGL SI - LONG $0x73058d48; WORD $0x00c0; BYTE $0x00 // leaq $49267(%rip), %rax /* _P10_TAB(%rip) */ - LONG $0x045efbc5; BYTE $0xf0 // vdivsd (%rax,%rsi,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - JMP LBB15_80 - -LBB15_71: - LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 - IMULQ CX, R12 - QUAD $0xfffffbda0562f9c5 // vpunpckldq $-1062(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ - QUAD $0xfffffbe2055cf9c5 // vsubpd $-1054(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ - MOVQ R12, 16(R14) - LONG $0x0579e3c4; 
WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 - LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 - ANDQ CX, AX - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - ORQ AX, CX - MOVQ CX, 8(R14) - MOVQ -56(BP), AX - MOVQ R13, 0(AX) - JMP LBB15_8 - -LBB15_72: - MOVL SI, AX - -LBB15_73: - QUAD $0xfffffbbf052ef9c5 // vucomisd $-1089(%rip), %xmm0 /* LCPI15_2(%rip) */ - JA LBB15_76 - QUAD $0xfffffbbd0d10fbc5 // vmovsd $-1091(%rip), %xmm1 /* LCPI15_3(%rip) */ - LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JA LBB15_76 - MOVL AX, AX - LONG $0xff0d8d48; WORD $0x00bf; BYTE $0x00 // leaq $49151(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - JMP LBB15_80 +LBB14_53: + ADDQ $56, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET -LBB15_76: - MOVL R11, -44(BP) - LEAQ -64(BP), CX +LBB14_54: + MOVL CX, DI + MOVQ AX, R13 + CMPL R8, $9 + JNE LBB14_59 + +LBB14_55: + TESTL DI, DI + JNE LBB14_58 + MOVQ $-9223372036854775808, AX + MOVLQSX R9, CX + TESTQ R12, R12 + JNS LBB14_80 + MOVQ R12, DX + ANDQ CX, DX + CMPQ DX, AX + JE LBB14_80 + +LBB14_58: + MOVQ $8, 0(BX) + +LBB14_59: + MOVQ $0, -64(BP) + LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 + QUAD $0xfffffcf60562f9c5 // vpunpckldq $-778(%rip), %xmm0, %xmm0 /* LCPI14_0(%rip) */ + QUAD $0xfffffcfe055cf9c5 // vsubpd $-770(%rip), %xmm0, %xmm0 /* LCPI14_1(%rip) */ + LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 + LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 + LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) + MOVQ R12, AX + SHRQ $52, AX + JE LBB14_71 + +LBB14_60: + MOVQ R11, -80(BP) + MOVQ R10, -88(BP) + LEAQ -48(BP), CX + MOVQ DI, SI MOVQ R12, DI - MOVL R10, DX - MOVL R10, BX - MOVQ SI, -88(BP) - LONG $0xffe779e8; BYTE $0xff // callq _atof_eisel_lemire64 + MOVQ SI, -72(BP) + MOVL R9, DX + MOVL R9, -56(BP) + LONG $0xffe855e8; BYTE $0xff // callq _atof_eisel_lemire64 TESTB AX, AX - JE LBB15_81 - MOVQ -88(BP), SI - CMPL -44(BP), $0 - JE LBB15_82 - MOVL BX, DX - ADDQ $1, R12 - LEAQ -80(BP), CX + JE LBB14_64 + MOVQ -72(BP), SI + MOVL -56(BP), DX + CMPL -52(BP), $0 + JE LBB14_79 + INCQ R12 + LEAQ -64(BP), CX MOVQ R12, DI - LONG $0xffe759e8; BYTE $0xff // callq _atof_eisel_lemire64 + LONG $0xffe831e8; BYTE $0xff // callq _atof_eisel_lemire64 TESTB AX, AX - JE LBB15_81 - LONG $0x4d10fbc5; BYTE $0xb0 // vmovsd $-80(%rbp), %xmm1 - LONG $0x4510fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm0 + JE LBB14_64 + LONG $0x4d10fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm1 + LONG $0x4510fbc5; BYTE $0xd0 // vmovsd $-48(%rbp), %xmm0 LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JNE LBB15_81 - JP LBB15_81 + JNE LBB14_64 + JNP LBB14_66 -LBB15_80: - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - JMP LBB15_83 - -LBB15_81: - MOVQ -56(BP), BX - MOVQ 0(BX), AX +LBB14_64: + MOVQ 0(R14), AX ADDQ AX, R15 MOVQ R13, SI SUBQ AX, SI MOVQ R15, DI - MOVQ -104(BP), DX - MOVQ -96(BP), CX - LONG $0xffed6ee8; BYTE $0xff // callq _atof_native - LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - JMP LBB15_84 + MOVQ -88(BP), DX + MOVQ -80(BP), CX + LONG $0xffef07e8; BYTE $0xff // callq _atof_native -LBB15_82: - MOVQ -64(BP), AX +LBB14_65: + LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) -LBB15_83: - MOVQ -56(BP), BX - LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 +LBB14_66: + LONG 
$0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax -LBB15_84: +LBB14_67: MOVQ $-9223372036854775808, CX - ADDQ $-1, CX + DECQ CX ANDQ AX, CX - MOVQ $9218868437227405312, AX - CMPQ CX, AX - JNE LBB15_86 - MOVQ $-8, 0(R14) + MOVQ $9218868437227405312, DX + CMPQ CX, DX + JNE LBB14_69 + MOVQ $-8, 0(BX) + +LBB14_69: + MOVQ AX, 8(BX) + +LBB14_70: + MOVQ R13, 0(R14) + JMP LBB14_53 + +LBB14_71: + LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx + MOVL R9, AX + SHRL $31, AX + SHLQ $63, AX + ORQ CX, AX + MOVQ AX, -48(BP) + TESTQ R12, R12 + JE LBB14_67 + TESTL DI, DI + JE LBB14_67 + LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 + LEAL -1(DI), AX + CMPL AX, $36 + JA LBB14_77 + CMPL DI, $23 + JL LBB14_81 + MOVLQSX DI, AX + LONG $0xbd0d8d48; WORD $0x00c4; BYTE $0x00 // leaq $50365(%rip), %rcx /* _P10_TAB(%rip) */ + QUAD $0xffff50c18459fbc5; BYTE $0xff // vmulsd $-176(%rcx,%rax,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) + MOVL $22, AX + JMP LBB14_82 + +LBB14_76: + MOVL $1, -52(BP) + MOVL CX, DI + CMPL R8, $9 + JE LBB14_55 + JMP LBB14_59 + +LBB14_77: + CMPL DI, $-22 + JB LBB14_60 + NEGL DI + MOVLQSX DI, AX + LONG $0x7b0d8d48; WORD $0x00c4; BYTE $0x00 // leaq $50299(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x045efbc5; BYTE $0xc1 // vdivsd (%rcx,%rax,8), %xmm0, %xmm0 + JMP LBB14_65 + +LBB14_79: + MOVQ -48(BP), AX + JMP LBB14_67 + +LBB14_80: + LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 + IMULQ CX, R12 + QUAD $0xfffffb790562f9c5 // vpunpckldq $-1159(%rip), %xmm0, %xmm0 /* LCPI14_0(%rip) */ + QUAD $0xfffffb81055cf9c5 // vsubpd $-1151(%rip), %xmm0, %xmm0 /* LCPI14_1(%rip) */ + MOVQ R12, 16(BX) + LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 + LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 + ANDQ CX, AX + LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx + ORQ AX, CX + MOVQ CX, 8(BX) + JMP LBB14_70 -LBB15_86: - LONG $0x117bc1c4; WORD $0x0846 // vmovsd %xmm0, $8(%r14) - MOVQ R13, 0(BX) - JMP LBB15_8 +LBB14_81: + MOVL DI, AX -LBB15_87: - MOVQ DX, AX +LBB14_82: + QUAD $0xfffffb65052ef9c5 // vucomisd $-1179(%rip), %xmm0 /* LCPI14_2(%rip) */ + JA LBB14_60 + QUAD $0xfffffb5f0d10fbc5 // vmovsd $-1185(%rip), %xmm1 /* LCPI14_3(%rip) */ + LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 + JA LBB14_60 + MOVL AX, AX + LONG $0x020d8d48; WORD $0x00c4; BYTE $0x00 // leaq $50178(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 + JMP LBB14_65 + +LBB14_85: + MOVQ DI, AX -LBB15_88: +LBB14_86: LEAL -48(SI), DI CMPB DI, $9 - JBE LBB15_90 - -LBB15_89: - MOVQ -56(BP), CX - MOVQ AX, 0(CX) - MOVQ $-2, 0(R14) - JMP LBB15_8 - -LBB15_90: + JA LBB14_5 + MOVL R9, -56(BP) CMPQ AX, R13 - JAE LBB15_96 + JAE LBB14_93 CMPB DI, $9 - JA LBB15_96 + JA LBB14_93 LEAQ -1(R13), R9 XORL DI, DI -LBB15_93: +LBB14_90: MOVL DI, DX - MOVBLZX SI, SI CMPL DI, $10000 LEAL 0(DX)(DX*4), DI + MOVBLZX SI, SI LEAL -48(SI)(DI*2), DI WORD $0x4d0f; BYTE $0xfa // cmovgel %edx, %edi CMPQ R9, AX - JE LBB15_97 + JE LBB14_94 MOVBLZX 1(R15)(AX*1), SI - ADDQ $1, AX + INCQ AX LEAL -48(SI), DX CMPB DX, $10 - JB LBB15_93 - JMP LBB15_98 + JB LBB14_90 + JMP LBB14_95 -LBB15_96: +LBB14_93: XORL DI, DI - JMP LBB15_98 + JMP LBB14_95 -LBB15_97: +LBB14_94: MOVQ R13, AX -LBB15_98: - MOVQ DI, SI - IMULL R8, SI - ADDL CX, SI +LBB14_95: + IMULL R8, DI + ADDL CX, DI MOVQ AX, R13 - MOVL -44(BP), R11 - JMP LBB15_61 + MOVL -56(BP), R9 + JMP LBB14_59 _vsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp BYTE $0x53 // pushq %rbx - MOVQ 0(SI), AX + MOVQ 0(SI), BX MOVQ 0(DI), 
R8 - MOVQ 8(DI), R11 + MOVQ 8(DI), R10 MOVQ $9, 0(DX) LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4211f8c5; BYTE $0x08 // vmovups %xmm0, $8(%rdx) MOVQ 0(SI), CX MOVQ CX, 24(DX) - CMPQ AX, R11 - JAE LBB16_1 - MOVB 0(R8)(AX*1), CX + CMPQ BX, R10 + JAE LBB15_1 + MOVB 0(R8)(BX*1), CX MOVL $1, R9 CMPB CX, $45 - JNE LBB16_5 - ADDQ $1, AX - CMPQ AX, R11 - JAE LBB16_1 - MOVB 0(R8)(AX*1), CX + JNE LBB15_5 + INCQ BX + CMPQ BX, R10 + JAE LBB15_1 + MOVB 0(R8)(BX*1), CX MOVQ $-1, R9 -LBB16_5: +LBB15_5: LEAL -48(CX), DI CMPB DI, $10 - JB LBB16_7 - MOVQ AX, 0(SI) + JB LBB15_7 + MOVQ BX, 0(SI) MOVQ $-2, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_1: - MOVQ R11, 0(SI) +LBB15_1: + MOVQ R10, 0(SI) MOVQ $-1, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_7: +LBB15_7: CMPB CX, $48 - JNE LBB16_12 - LEAQ 1(AX), DI - CMPQ AX, R11 - JAE LBB16_11 + JNE LBB15_8 + LEAQ 1(BX), DI + CMPQ BX, R10 + JAE LBB15_17 MOVB 0(R8)(DI*1), CX ADDB $-46, CX CMPB CX, $55 - JA LBB16_11 - MOVBLZX CX, R10 + JA LBB15_17 + MOVBLZX CX, R11 MOVQ $36028797027352577, CX - BTQ R10, CX - JAE LBB16_11 + BTQ R11, CX + JAE LBB15_17 -LBB16_12: - CMPQ AX, R11 - MOVQ R11, R10 - LONG $0xd0470f4c // cmovaq %rax, %r10 +LBB15_8: XORL DI, DI -LBB16_13: - CMPQ R10, AX - JE LBB16_23 - MOVBQSX 0(R8)(AX*1), CX - LEAL -48(CX), BX - CMPB BX, $9 - JA LBB16_18 +LBB15_9: + CMPQ BX, R10 + JAE LBB15_22 + MOVBQSX 0(R8)(BX*1), CX + LEAL -48(CX), AX + CMPB AX, $9 + JA LBB15_18 IMUL3Q $10, DI, DI - JO LBB16_17 - ADDQ $1, AX - ADDL $-48, CX + JO LBB15_13 + INCQ BX + ADDQ $-48, CX IMULQ R9, CX ADDQ CX, DI - JNO LBB16_13 + JNO LBB15_9 -LBB16_17: - ADDQ $-1, AX - MOVQ AX, 0(SI) +LBB15_13: + DECQ BX + MOVQ BX, 0(SI) MOVQ $-5, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_11: +LBB15_17: MOVQ DI, 0(SI) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_18: - CMPQ AX, R11 - JAE LBB16_22 - CMPB CX, $46 - JE LBB16_25 - CMPB CX, $69 - JE LBB16_25 +LBB15_18: CMPB CX, $101 - JNE LBB16_22 + JE LBB15_21 + CMPB CX, $69 + JE LBB15_21 + CMPB CX, $46 + JNE LBB15_22 -LBB16_25: - MOVQ AX, 0(SI) +LBB15_21: + MOVQ BX, 0(SI) MOVQ $-6, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_22: - MOVQ AX, R10 - -LBB16_23: - MOVQ R10, 0(SI) +LBB15_22: + MOVQ BX, 0(SI) MOVQ DI, 16(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp @@ -6098,7 +6031,6 @@ LBB16_23: _vunsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - BYTE $0x53 // pushq %rbx MOVQ DX, R8 MOVQ 0(SI), CX MOVQ 0(DI), R9 @@ -6109,104 +6041,96 @@ _vunsigned: MOVQ 0(SI), AX MOVQ AX, 24(DX) CMPQ CX, R11 - JAE LBB17_1 + JAE LBB16_1 MOVB 0(R9)(CX*1), AX CMPB AX, $45 - JNE LBB17_4 + JNE LBB16_4 -LBB17_3: +LBB16_3: MOVQ CX, 0(SI) MOVQ $-6, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB17_1: +LBB16_1: MOVQ R11, 0(SI) MOVQ $-1, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB17_4: +LBB16_4: LEAL -48(AX), DX CMPB DX, $10 - JB LBB17_6 + JB LBB16_6 MOVQ CX, 0(SI) MOVQ $-2, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB17_6: +LBB16_6: CMPB AX, $48 - JNE LBB17_7 + JNE LBB16_7 MOVB 1(R9)(CX*1), AX ADDB $-46, AX CMPB AX, $55 - JA LBB17_16 + JA LBB16_16 MOVBLZX AX, AX MOVQ $36028797027352577, DX BTQ AX, DX - JAE LBB17_16 + JAE LBB16_16 -LBB17_7: +LBB16_7: XORL AX, AX MOVL $10, R10 -LBB17_8: - CMPQ R11, CX - JE LBB17_22 +LBB16_8: + CMPQ CX, R11 + JAE LBB16_20 MOVBLSX 0(R9)(CX*1), DI LEAL -48(DI), DX CMPB DX, $9 - JA LBB17_17 + JA LBB16_17 MULQ R10 - JO LBB17_13 - ADDQ $1, CX + JO LBB16_13 
+ INCQ CX ADDL $-48, DI - XORL BX, BX - ADDQ DI, AX - SETCS BX - MOVQ BX, DX + MOVLQSX DI, DX + MOVQ DX, DI + SARQ $63, DI + ADDQ DX, AX + ADCQ $0, DI + MOVL DI, DX + ANDL $1, DX NEGQ DX - XORQ DX, BX - JNE LBB17_13 + XORQ DX, DI + JNE LBB16_13 TESTQ DX, DX - JNS LBB17_8 + JNS LBB16_8 -LBB17_13: - ADDQ $-1, CX +LBB16_13: + DECQ CX MOVQ CX, 0(SI) MOVQ $-5, 0(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB17_17: - CMPQ CX, R11 - JAE LBB17_21 - CMPB DI, $46 - JE LBB17_3 - CMPB DI, $69 - JE LBB17_3 +LBB16_17: CMPB DI, $101 - JE LBB17_3 - -LBB17_21: - MOVQ CX, R11 + JE LBB16_3 + CMPB DI, $69 + JE LBB16_3 + CMPB DI, $46 + JE LBB16_3 -LBB17_22: - MOVQ R11, 0(SI) +LBB16_20: + MOVQ CX, 0(SI) MOVQ AX, 16(R8) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB17_16: - ADDQ $1, CX +LBB16_16: + INCQ CX MOVQ CX, 0(SI) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -6222,6 +6146,34 @@ _skip_one: BYTE $0x5d // popq %rbp JMP _fsm_exec +LCPI18_0: + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + +LCPI18_1: + QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' + QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' + +LCPI18_2: + QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' + QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' + +LCPI18_3: + QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' + QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' + +LCPI18_4: + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + +LCPI18_5: + QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' + +LCPI18_6: + QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' + +LCPI18_7: + QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' + _fsm_exec: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp @@ -6230,567 +6182,869 @@ _fsm_exec: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVL CX, -60(BP) + SUBQ $24, SP CMPL 0(DI), $0 - JE LBB19_2 + JE LBB18_2 + MOVQ DX, R11 + MOVQ SI, BX MOVQ DI, R12 - MOVQ DX, -56(BP) - MOVQ SI, -48(BP) + MOVL CX, -52(BP) MOVQ $-1, R14 - JMP LBB19_4 + MOVQ $4294977024, R8 + QUAD $0xffffff051d6ffec5 // vmovdqu $-251(%rip), %ymm3 /* LCPI18_0(%rip) */ + QUAD $0xffffff1d256ffec5 // vmovdqu $-227(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xffffff352d6ffec5 // vmovdqu $-203(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xffffff4d356ffec5 // vmovdqu $-179(%rip), %ymm6 /* LCPI18_3(%rip) */ + MOVQ SI, -48(BP) + JMP LBB18_4 -LBB19_2: +LBB18_2: MOVQ $-1, R13 - JMP LBB19_67 + JMP LBB18_113 -LBB19_3: +LBB18_3: LEAQ 3(AX), CX - MOVQ CX, 0(BX) + MOVQ CX, 0(R11) TESTQ AX, AX - JLE LBB19_64 + JLE LBB18_120 -LBB19_40: - MOVL 0(R12), CX +LBB18_94: + MOVL 0(R12), AX MOVQ R14, R13 - TESTL CX, CX - JE LBB19_67 - -LBB19_4: - MOVQ -48(BP), R13 - MOVQ R13, DI - MOVQ -56(BP), BX - MOVQ BX, SI - LONG $0xfff1bde8; BYTE $0xff // callq _advance_ns - MOVLQSX 0(R12), DX - LEAQ -1(DX), CX + TESTL AX, AX + JE LBB18_113 + +LBB18_4: + MOVQ 0(R11), SI + MOVQ 0(BX), R9 + MOVQ 8(BX), R10 + CMPQ SI, R10 + JAE LBB18_8 
+ MOVB 0(R9)(SI*1), CX + CMPB CX, $13 + JE LBB18_8 + CMPB CX, $32 + JE LBB18_8 + ADDB $-9, CX + CMPB CX, $1 + JA LBB18_53 + +LBB18_8: + LEAQ 1(SI), DX + CMPQ DX, R10 + JAE LBB18_13 + MOVB 0(R9)(DX*1), CX + CMPB CX, $13 + JE LBB18_13 + CMPB CX, $32 + JE LBB18_13 + ADDB $-9, CX + CMPB CX, $1 + JA LBB18_12 + +LBB18_13: + LEAQ 2(SI), DX + CMPQ DX, R10 + JAE LBB18_18 + MOVB 0(R9)(DX*1), CX + CMPB CX, $13 + JE LBB18_18 + CMPB CX, $32 + JE LBB18_18 + ADDB $-9, CX + CMPB CX, $1 + JA LBB18_12 + +LBB18_18: + LEAQ 3(SI), DX + CMPQ DX, R10 + JAE LBB18_23 + MOVB 0(R9)(DX*1), CX + CMPB CX, $13 + JE LBB18_23 + CMPB CX, $32 + JE LBB18_23 + ADDB $-9, CX + CMPB CX, $1 + JBE LBB18_23 + +LBB18_12: + MOVQ DX, SI + JMP LBB18_53 + +LBB18_23: + LEAQ 4(SI), CX + CMPQ R10, CX + JBE LBB18_48 + LEAQ 0(R9)(CX*1), DI + MOVQ R10, DX + SUBQ CX, DX + JE LBB18_32 + MOVL DI, CX + ANDL $31, CX + TESTQ CX, CX + JE LBB18_32 + LEAQ 0(R9)(SI*1), DI + MOVQ R10, DX + SUBQ SI, DX + LEAQ -5(DX), SI + XORL BX, BX + +LBB18_27: + MOVBLSX 4(DI)(BX*1), CX + CMPL CX, $32 + JA LBB18_51 + BTQ CX, R8 + JAE LBB18_51 + LEAQ 1(BX), CX + CMPQ SI, BX + JE LBB18_31 + LEAQ 5(DI)(BX*1), AX + ANDL $31, AX + MOVQ CX, BX + TESTQ AX, AX + JNE LBB18_27 + +LBB18_31: + LEAQ 4(CX)(DI*1), DI + SUBQ CX, DX + ADDQ $-4, DX + MOVQ -48(BP), BX + +LBB18_32: + CMPQ DX, $32 + JB LBB18_37 + LEAQ -32(DX), SI + MOVQ SI, CX + ANDQ $-32, CX + LEAQ 32(CX)(DI*1), R8 + ANDL $31, SI + +LBB18_34: + LONG $0x076ffdc5 // vmovdqa (%rdi), %ymm0 + LONG $0xcb74fdc5 // vpcmpeqb %ymm3, %ymm0, %ymm1 + LONG $0xd474fdc5 // vpcmpeqb %ymm4, %ymm0, %ymm2 + LONG $0xcaebf5c5 // vpor %ymm2, %ymm1, %ymm1 + LONG $0xd574fdc5 // vpcmpeqb %ymm5, %ymm0, %ymm2 + LONG $0xc674fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm0 + LONG $0xc2ebfdc5 // vpor %ymm2, %ymm0, %ymm0 + LONG $0xc1ebfdc5 // vpor %ymm1, %ymm0, %ymm0 + LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx + CMPL CX, $-1 + JNE LBB18_49 + ADDQ $32, DI + ADDQ $-32, DX + CMPQ DX, $31 + JA LBB18_34 + MOVQ SI, DX + MOVQ R8, DI + +LBB18_37: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + CMPQ DX, $16 + JB LBB18_111 + LEAQ -16(DX), SI + MOVQ SI, AX + ANDQ $-16, AX + LEAQ 16(AX)(DI*1), R8 + ANDL $15, SI + QUAD $0xfffffd381d6ffec5 // vmovdqu $-712(%rip), %ymm3 /* LCPI18_0(%rip) */ + QUAD $0xfffffd50256ffec5 // vmovdqu $-688(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffffd682d6ffec5 // vmovdqu $-664(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffffd80356ffec5 // vmovdqu $-640(%rip), %ymm6 /* LCPI18_3(%rip) */ + +LBB18_39: + LONG $0x076ff9c5 // vmovdqa (%rdi), %xmm0 + QUAD $0xfffffd940d74f9c5 // vpcmpeqb $-620(%rip), %xmm0, %xmm1 /* LCPI18_4(%rip) */ + QUAD $0xfffffd9c1574f9c5 // vpcmpeqb $-612(%rip), %xmm0, %xmm2 /* LCPI18_5(%rip) */ + LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 + QUAD $0xfffffda01574f9c5 // vpcmpeqb $-608(%rip), %xmm0, %xmm2 /* LCPI18_6(%rip) */ + QUAD $0xfffffda80574f9c5 // vpcmpeqb $-600(%rip), %xmm0, %xmm0 /* LCPI18_7(%rip) */ + LONG $0xc2ebf9c5 // vpor %xmm2, %xmm0, %xmm0 + LONG $0xc1ebf9c5 // vpor %xmm1, %xmm0, %xmm0 + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + CMPW CX, $-1 + JNE LBB18_54 + ADDQ $16, DI + ADDQ $-16, DX + CMPQ DX, $15 + JA LBB18_39 + MOVQ SI, DX + MOVQ R8, DI + TESTQ DX, DX + JE LBB18_47 + +LBB18_42: + LEAQ 0(DI)(DX*1), R8 + INCQ DI + MOVQ DI, SI + +LBB18_43: + MOVBLSX -1(SI), CX + CMPL CX, $32 + JA LBB18_55 + MOVQ $4294977024, AX + BTQ CX, AX + JAE LBB18_55 + DECQ DX + INCQ SI + TESTQ DX, DX + JNE LBB18_43 + MOVQ R8, DI + +LBB18_47: + MOVQ $4294977024, R8 + SUBQ R9, DI + MOVQ DI, SI + CMPQ SI, R10 + JB LBB18_52 + JMP LBB18_56 + 
+LBB18_48: + MOVQ CX, 0(R11) + JMP LBB18_56 + +LBB18_49: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + QUAD $0xfffffcc9356ffec5 // vmovdqu $-823(%rip), %ymm6 /* LCPI18_3(%rip) */ + QUAD $0xfffffca12d6ffec5 // vmovdqu $-863(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffffc79256ffec5 // vmovdqu $-903(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffffc511d6ffec5 // vmovdqu $-943(%rip), %ymm3 /* LCPI18_0(%rip) */ + SUBQ R9, DI + NOTL CX + MOVLQSX CX, AX + BSFQ AX, SI + +LBB18_50: + ADDQ DI, SI + MOVQ $4294977024, R8 + CMPQ SI, R10 + JAE LBB18_56 + +LBB18_52: + MOVQ 0(BX), R9 + +LBB18_53: + LEAQ 1(SI), AX + MOVQ AX, 0(R11) + MOVB 0(R9)(SI*1), DX + JMP LBB18_57 + +LBB18_51: + ADDQ BX, DI + NOTQ R9 + LEAQ 5(R9)(DI*1), SI + MOVQ -48(BP), BX + CMPQ SI, R10 + JB LBB18_52 + +LBB18_56: + XORL DX, DX + +LBB18_57: + MOVLQSX 0(R12), CX + LEAQ -1(CX), AX + MOVL 0(R12)(CX*4), SI CMPQ R14, $-1 - JNE LBB19_6 - MOVQ 0(BX), R14 - ADDQ $-1, R14 + JNE LBB18_59 + MOVQ 0(R11), R14 + DECQ R14 -LBB19_6: - MOVL 0(R12)(DX*4), SI - ADDL $-1, SI +LBB18_59: + DECL SI CMPL SI, $5 - JA LBB19_11 - LONG $0x7c3d8d48; WORD $0x0004; BYTE $0x00 // leaq $1148(%rip), %rdi /* LJTI19_0(%rip) */ + JA LBB18_64 + LONG $0xf63d8d48; WORD $0x0005; BYTE $0x00 // leaq $1526(%rip), %rdi /* LJTI18_0(%rip) */ MOVLQSX 0(DI)(SI*4), SI ADDQ DI, SI JMP SI -LBB19_8: - MOVBLSX AX, AX - CMPL AX, $44 - JE LBB19_30 - CMPL AX, $93 - JNE LBB19_66 - MOVL CX, 0(R12) +LBB18_61: + MOVBLSX DX, DX + CMPL DX, $44 + JE LBB18_83 + CMPL DX, $93 + JNE LBB18_112 + MOVL AX, 0(R12) MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_67 + TESTL AX, AX + JNE LBB18_4 + JMP LBB18_113 -LBB19_11: - MOVL CX, 0(R12) - MOVBLSX AX, AX +LBB18_64: + MOVL AX, 0(R12) + MOVBLSX DX, AX CMPL AX, $123 - JBE LBB19_27 - JMP LBB19_66 - -LBB19_12: - MOVBLSX AX, AX - CMPL AX, $44 - JE LBB19_32 - CMPL AX, $125 - JNE LBB19_66 - MOVL CX, 0(R12) + JBE LBB18_80 + JMP LBB18_112 + +LBB18_65: + MOVBLSX DX, DX + CMPL DX, $44 + JE LBB18_85 + CMPL DX, $125 + JNE LBB18_112 + MOVL AX, 0(R12) MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_67 - -LBB19_15: - CMPB AX, $34 - JNE LBB19_66 - MOVL $4, 0(R12)(DX*4) - MOVQ 0(BX), R15 - MOVQ R13, DI - -LBB19_17: + TESTL AX, AX + JNE LBB18_4 + JMP LBB18_113 + +LBB18_68: + CMPB DX, $34 + JNE LBB18_112 + MOVL $4, 0(R12)(CX*4) + +LBB18_70: + MOVQ 0(R11), R15 + MOVQ BX, DI MOVQ R15, SI - LEAQ -72(BP), DX - LONG $0xfff425e8; BYTE $0xff // callq _advance_string + LEAQ -64(BP), DX + MOVQ R11, BX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0xfff13fe8; BYTE $0xff // callq _advance_string MOVQ AX, R13 TESTQ AX, AX - JS LBB19_57 + JS LBB18_114 MOVQ R13, 0(BX) TESTQ R15, R15 - JG LBB19_40 - JMP LBB19_58 - -LBB19_19: - CMPB AX, $58 - JNE LBB19_66 - MOVL $0, 0(R12)(DX*4) - JMP LBB19_40 - -LBB19_21: - CMPB AX, $93 - JNE LBB19_26 - MOVL CX, 0(R12) + MOVQ BX, R11 + MOVQ -48(BP), BX + MOVQ $4294977024, R8 + QUAD $0xfffffb231d6ffec5 // vmovdqu $-1245(%rip), %ymm3 /* LCPI18_0(%rip) */ + QUAD $0xfffffb3b256ffec5 // vmovdqu $-1221(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffffb532d6ffec5 // vmovdqu $-1197(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffffb6b356ffec5 // vmovdqu $-1173(%rip), %ymm6 /* LCPI18_3(%rip) */ + JG LBB18_94 + JMP LBB18_115 + +LBB18_72: + CMPB DX, $58 + JNE LBB18_112 + MOVL $0, 0(R12)(CX*4) + JMP LBB18_94 + +LBB18_74: + CMPB DX, $93 + JNE LBB18_79 + MOVL AX, 0(R12) MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_67 - -LBB19_23: - MOVBLSX AX, AX - CMPL AX, $34 - JE LBB19_34 - CMPL AX, $125 - JNE LBB19_66 - MOVL CX, 0(R12) + TESTL AX, AX + JNE 
LBB18_4 + JMP LBB18_113 + +LBB18_76: + MOVBLSX DX, DX + CMPL DX, $34 + JE LBB18_87 + CMPL DX, $125 + JNE LBB18_112 + MOVL AX, 0(R12) MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_67 + TESTL AX, AX + JNE LBB18_4 + JMP LBB18_113 -LBB19_26: - MOVL $1, 0(R12)(DX*4) - MOVBLSX AX, AX +LBB18_79: + MOVL $1, 0(R12)(CX*4) + MOVBLSX DX, AX CMPL AX, $123 - JA LBB19_66 + JA LBB18_112 -LBB19_27: +LBB18_80: MOVQ $-1, R13 - LONG $0x710d8d48; WORD $0x0003; BYTE $0x00 // leaq $881(%rip), %rcx /* LJTI19_1(%rip) */ + LONG $0xad0d8d48; WORD $0x0004; BYTE $0x00 // leaq $1197(%rip), %rcx /* LJTI18_1(%rip) */ MOVLQSX 0(CX)(AX*4), AX ADDQ CX, AX JMP AX -LBB19_28: - MOVQ -56(BP), BX - MOVQ 0(BX), R15 +LBB18_81: + MOVQ 0(R11), R15 LEAQ -1(R15), R13 - MOVQ -48(BP), AX - MOVQ 0(AX), DI + MOVQ 0(BX), DI ADDQ R13, DI - MOVQ 8(AX), SI + MOVQ 8(BX), SI SUBQ R13, SI - LONG $0x000d91e8; BYTE $0x00 // callq _do_skip_number + MOVQ R11, BX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x000eaae8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB19_59 + JS LBB18_116 MOVQ 0(BX), CX - ADDQ CX, AX - ADDQ $-1, AX + LEAQ -1(AX)(CX*1), AX MOVQ AX, 0(BX) TESTQ R15, R15 - JG LBB19_40 - JMP LBB19_67 - -LBB19_30: - CMPL DX, $65535 - JG LBB19_60 - LEAL 1(DX), AX + MOVQ BX, R11 + MOVQ -48(BP), BX + MOVQ $4294977024, R8 + QUAD $0xfffffa2e1d6ffec5 // vmovdqu $-1490(%rip), %ymm3 /* LCPI18_0(%rip) */ + QUAD $0xfffffa46256ffec5 // vmovdqu $-1466(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffffa5e2d6ffec5 // vmovdqu $-1442(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffffa76356ffec5 // vmovdqu $-1418(%rip), %ymm6 /* LCPI18_3(%rip) */ + JG LBB18_94 + JMP LBB18_113 + +LBB18_83: + CMPL CX, $65535 + JG LBB18_117 + LEAL 1(CX), AX MOVL AX, 0(R12) - MOVL $0, 4(R12)(DX*4) - JMP LBB19_40 + MOVL $0, 4(R12)(CX*4) + JMP LBB18_94 -LBB19_32: - CMPL DX, $65535 - JG LBB19_60 - LEAL 1(DX), AX +LBB18_85: + CMPL CX, $65535 + JG LBB18_117 + LEAL 1(CX), AX MOVL AX, 0(R12) - MOVL $3, 4(R12)(DX*4) - JMP LBB19_40 - -LBB19_34: - MOVL $2, 0(R12)(DX*4) - CMPL -60(BP), $0 - JE LBB19_36 - MOVQ R13, DI - MOVQ BX, SI - LONG $0x0005a4e8; BYTE $0x00 // callq _validate_string - TESTQ AX, AX - JNS LBB19_38 - JMP LBB19_63 - -LBB19_36: - MOVQ 0(BX), R15 - MOVQ R13, DI + MOVL $3, 4(R12)(CX*4) + JMP LBB18_94 + +LBB18_87: + MOVL $2, 0(R12)(CX*4) + MOVL -52(BP), AX + CMPL AX, $1 + JE LBB18_91 + TESTL AX, AX + JNE LBB18_92 + MOVQ 0(R11), R15 + MOVQ BX, DI MOVQ R15, SI - LEAQ -72(BP), DX - LONG $0xfff2cbe8; BYTE $0xff // callq _advance_string + LEAQ -64(BP), DX + MOVQ R11, BX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0xffef8ee8; BYTE $0xff // callq _advance_string MOVQ AX, R13 TESTQ AX, AX - JS LBB19_57 + JS LBB18_114 MOVQ R13, 0(BX) TESTQ R15, R15 - JLE LBB19_58 + MOVQ BX, R11 + MOVQ -48(BP), BX + JG LBB18_92 + JMP LBB18_115 + +LBB18_91: + MOVQ BX, DI + MOVQ R11, SI + MOVQ R11, BX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x00066ee8; BYTE $0x00 // callq _validate_string + MOVQ BX, R11 + MOVQ -48(BP), BX + TESTQ AX, AX + JS LBB18_121 -LBB19_38: +LBB18_92: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_60 + JG LBB18_117 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $4, 4(R12)(AX*4) - JMP LBB19_40 - -LBB19_41: - CMPL -60(BP), $0 - JE LBB19_56 - MOVQ -48(BP), DI - MOVQ -56(BP), SI - LONG $0x000534e8; BYTE $0x00 // callq _validate_string - TESTQ AX, AX - JNS LBB19_40 - JMP LBB19_63 - -LBB19_43: - MOVQ -56(BP), BX - MOVQ 0(BX), R13 - MOVQ -48(BP), AX - MOVQ 0(AX), DI + MOVQ $4294977024, R8 + QUAD $0xfffff92a1d6ffec5 // vmovdqu $-1750(%rip), %ymm3 /* LCPI18_0(%rip) */ + 
QUAD $0xfffff942256ffec5 // vmovdqu $-1726(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffff95a2d6ffec5 // vmovdqu $-1702(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffff972356ffec5 // vmovdqu $-1678(%rip), %ymm6 /* LCPI18_3(%rip) */ + JMP LBB18_94 + +LBB18_95: + MOVL -52(BP), AX + CMPL AX, $1 + JE LBB18_110 + TESTL AX, AX + JE LBB18_70 + JMP LBB18_94 + +LBB18_97: + MOVQ 0(R11), R13 + MOVQ 0(BX), DI ADDQ R13, DI - MOVQ 8(AX), SI + MOVQ 8(BX), SI SUBQ R13, SI - LONG $0x000c77e8; BYTE $0x00 // callq _do_skip_number + MOVQ R11, BX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x000d25e8; BYTE $0x00 // callq _do_skip_number + MOVQ BX, R11 + MOVQ 0(BX), CX TESTQ AX, AX - JS LBB19_65 - ADDQ AX, 0(BX) + JS LBB18_122 + ADDQ AX, CX + MOVQ CX, 0(R11) TESTQ R13, R13 - JG LBB19_40 - JMP LBB19_68 - -LBB19_45: + MOVQ -48(BP), BX + MOVQ $4294977024, R8 + QUAD $0xfffff8ab1d6ffec5 // vmovdqu $-1877(%rip), %ymm3 /* LCPI18_0(%rip) */ + QUAD $0xfffff8c3256ffec5 // vmovdqu $-1853(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffff8db2d6ffec5 // vmovdqu $-1829(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffff8f3356ffec5 // vmovdqu $-1805(%rip), %ymm6 /* LCPI18_3(%rip) */ + JG LBB18_94 + JMP LBB18_128 + +LBB18_99: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_60 + JG LBB18_117 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $5, 4(R12)(AX*4) - JMP LBB19_40 + JMP LBB18_94 -LBB19_47: - MOVQ -56(BP), BX - MOVQ 0(BX), AX - MOVQ -48(BP), SI - MOVQ 8(SI), CX +LBB18_101: + MOVQ 0(R11), AX + MOVQ 8(BX), CX LEAQ -4(CX), DX CMPQ AX, DX - JA LBB19_74 - MOVQ 0(SI), CX + JA LBB18_125 + MOVQ 0(BX), CX MOVL 0(CX)(AX*1), DX CMPL DX, $1702063201 - JNE LBB19_71 + JNE LBB18_129 LEAQ 4(AX), CX - MOVQ CX, 0(BX) + MOVQ CX, 0(R11) TESTQ AX, AX - JG LBB19_40 - JMP LBB19_64 - -LBB19_50: - MOVQ -56(BP), BX - MOVQ 0(BX), AX - MOVQ -48(BP), SI - MOVQ 8(SI), CX + JG LBB18_94 + JMP LBB18_120 + +LBB18_104: + MOVQ 0(R11), AX + MOVQ 8(BX), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB19_74 - MOVQ 0(SI), CX + JA LBB18_125 + MOVQ 0(BX), CX CMPL -1(CX)(AX*1), $1819047278 - JE LBB19_3 - JMP LBB19_80 - -LBB19_52: - MOVQ -56(BP), BX - MOVQ 0(BX), AX - MOVQ -48(BP), SI - MOVQ 8(SI), CX + JE LBB18_3 + JMP LBB18_132 + +LBB18_106: + MOVQ 0(R11), AX + MOVQ 8(BX), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB19_74 - MOVQ 0(SI), CX + JA LBB18_125 + MOVQ 0(BX), CX CMPL -1(CX)(AX*1), $1702195828 - JE LBB19_3 - JMP LBB19_76 + JE LBB18_3 + JMP LBB18_135 -LBB19_54: +LBB18_108: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_60 + JG LBB18_117 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $6, 4(R12)(AX*4) - JMP LBB19_40 + JMP LBB18_94 + +LBB18_54: + MOVWLZX CX, AX + SUBQ R9, DI + NOTL AX + BSFL AX, SI + JMP LBB18_50 + +LBB18_110: + MOVQ BX, DI + MOVQ R11, SI + MOVQ R11, BX + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x000490e8; BYTE $0x00 // callq _validate_string + QUAD $0xfffff7e6356ffec5 // vmovdqu $-2074(%rip), %ymm6 /* LCPI18_3(%rip) */ + QUAD $0xfffff7be2d6ffec5 // vmovdqu $-2114(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffff796256ffec5 // vmovdqu $-2154(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffff76e1d6ffec5 // vmovdqu $-2194(%rip), %ymm3 /* LCPI18_0(%rip) */ + MOVQ $4294977024, R8 + MOVQ BX, R11 + MOVQ -48(BP), BX + TESTQ AX, AX + JNS LBB18_94 + JMP LBB18_121 + +LBB18_55: + NOTQ R9 + ADDQ R9, SI + MOVQ $4294977024, R8 + CMPQ SI, R10 + JB LBB18_52 + JMP LBB18_56 + +LBB18_111: + QUAD $0xfffff7291d6ffec5 // vmovdqu $-2263(%rip), %ymm3 /* LCPI18_0(%rip) */ + QUAD $0xfffff741256ffec5 // vmovdqu $-2239(%rip), %ymm4 /* LCPI18_1(%rip) */ + QUAD $0xfffff7592d6ffec5 // vmovdqu 
$-2215(%rip), %ymm5 /* LCPI18_2(%rip) */ + QUAD $0xfffff771356ffec5 // vmovdqu $-2191(%rip), %ymm6 /* LCPI18_3(%rip) */ + TESTQ DX, DX + JNE LBB18_42 + JMP LBB18_47 -LBB19_56: - MOVQ -56(BP), BX - MOVQ 0(BX), R15 - MOVQ -48(BP), DI - JMP LBB19_17 +LBB18_112: + MOVQ 0(R11), AX + XORL R13, R13 + CMPQ AX, 8(BX) + SETCS R13 + NOTQ R13 + JMP LBB18_113 -LBB19_60: +LBB18_117: MOVQ $-7, R13 - JMP LBB19_67 + JMP LBB18_113 -LBB19_57: +LBB18_114: MOVQ -48(BP), AX MOVQ 8(AX), AX MOVQ AX, 0(BX) - JMP LBB19_67 + JMP LBB18_113 -LBB19_58: - ADDQ $-1, R15 +LBB18_115: + DECQ R15 MOVQ R15, R13 - JMP LBB19_67 + JMP LBB18_113 -LBB19_59: +LBB18_116: MOVQ 0(BX), CX SUBQ AX, CX ADDQ $-2, CX MOVQ CX, 0(BX) - -LBB19_66: MOVQ $-2, R13 + JMP LBB18_113 -LBB19_67: - MOVQ R13, AX - ADDQ $40, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - RET - -LBB19_63: - MOVQ AX, R13 - JMP LBB19_67 +LBB18_125: + MOVQ CX, 0(R11) + JMP LBB18_113 -LBB19_74: - MOVQ CX, 0(BX) - JMP LBB19_67 +LBB18_120: + DECQ AX -LBB19_64: - ADDQ $-1, AX +LBB18_121: MOVQ AX, R13 - JMP LBB19_67 -LBB19_65: +LBB18_113: + MOVQ R13, AX + ADDQ $24, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + WORD $0xf8c5; BYTE $0x77 // vzeroupper + RET + +LBB18_122: NOTQ AX - ADDQ AX, 0(BX) - JMP LBB19_66 + ADDQ AX, CX + MOVQ CX, 0(R11) + MOVQ $-2, R13 + JMP LBB18_113 -LBB19_68: - ADDQ $-1, R13 - JMP LBB19_67 +LBB18_128: + DECQ R13 + JMP LBB18_113 -LBB19_71: +LBB18_129: MOVQ $-2, R13 CMPB DX, $97 - JNE LBB19_67 - ADDQ $1, AX + JNE LBB18_113 + INCQ AX MOVL $1702063201, DX -LBB19_73: +LBB18_131: SHRL $8, DX - MOVQ AX, 0(BX) + MOVQ AX, 0(R11) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - ADDQ $1, AX + INCQ AX CMPL DI, SI - JE LBB19_73 - JMP LBB19_67 + JE LBB18_131 + JMP LBB18_113 -LBB19_76: +LBB18_132: LEAQ -1(AX), DX - MOVQ DX, 0(BX) + MOVQ DX, 0(R11) MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $116 - JNE LBB19_67 - MOVL $1702195828, DX + CMPB -1(CX)(AX*1), $110 + JNE LBB18_113 + MOVL $1819047278, DX -LBB19_78: +LBB18_134: SHRL $8, DX - MOVQ AX, 0(BX) + MOVQ AX, 0(R11) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - ADDQ $1, AX + INCQ AX CMPL DI, SI - JE LBB19_78 - JMP LBB19_67 + JE LBB18_134 + JMP LBB18_113 -LBB19_80: +LBB18_135: LEAQ -1(AX), DX - MOVQ DX, 0(BX) + MOVQ DX, 0(R11) MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $110 - JNE LBB19_67 - MOVL $1819047278, DX + CMPB -1(CX)(AX*1), $116 + JNE LBB18_113 + MOVL $1702195828, DX -LBB19_82: +LBB18_137: SHRL $8, DX - MOVQ AX, 0(BX) + MOVQ AX, 0(R11) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - ADDQ $1, AX + INCQ AX CMPL DI, SI - JE LBB19_82 - JMP LBB19_67 - -// .set L19_0_set_8, LBB19_8-LJTI19_0 -// .set L19_0_set_12, LBB19_12-LJTI19_0 -// .set L19_0_set_15, LBB19_15-LJTI19_0 -// .set L19_0_set_19, LBB19_19-LJTI19_0 -// .set L19_0_set_21, LBB19_21-LJTI19_0 -// .set L19_0_set_23, LBB19_23-LJTI19_0 -LJTI19_0: - LONG $0xfffffb8d // .long L19_0_set_8 - LONG $0xfffffbc7 // .long L19_0_set_12 - LONG $0xfffffbf0 // .long L19_0_set_15 - LONG $0xfffffc2f // .long L19_0_set_19 - LONG $0xfffffc44 // .long L19_0_set_21 - LONG $0xfffffc5c // .long L19_0_set_23 - - // .set L19_1_set_67, LBB19_67-LJTI19_1 - // .set L19_1_set_66, LBB19_66-LJTI19_1 - // .set L19_1_set_41, LBB19_41-LJTI19_1 - // .set L19_1_set_43, LBB19_43-LJTI19_1 - // .set L19_1_set_28, LBB19_28-LJTI19_1 - // .set L19_1_set_45, LBB19_45-LJTI19_1 - // .set L19_1_set_47, 
LBB19_47-LJTI19_1 - // .set L19_1_set_50, LBB19_50-LJTI19_1 - // .set L19_1_set_52, LBB19_52-LJTI19_1 - // .set L19_1_set_54, LBB19_54-LJTI19_1 -LJTI19_1: - LONG $0xffffff1d // .long L19_1_set_67 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffd91 // .long L19_1_set_41 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffdb6 // .long L19_1_set_43 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xfffffc98 // .long L19_1_set_28 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long 
L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffded // .long L19_1_set_45 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffe12 // .long L19_1_set_47 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffe55 // .long L19_1_set_50 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffe87 // .long L19_1_set_52 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xffffff16 // .long L19_1_set_66 - LONG $0xfffffeb9 // .long L19_1_set_54 + JE LBB18_137 + JMP LBB18_113 + +// .set L18_0_set_61, LBB18_61-LJTI18_0 +// .set L18_0_set_65, LBB18_65-LJTI18_0 +// .set L18_0_set_68, LBB18_68-LJTI18_0 +// .set L18_0_set_72, LBB18_72-LJTI18_0 +// .set L18_0_set_74, LBB18_74-LJTI18_0 +// .set L18_0_set_76, LBB18_76-LJTI18_0 +LJTI18_0: + LONG $0xfffffa13 // .long L18_0_set_61 + LONG $0xfffffa51 // .long L18_0_set_65 + LONG $0xfffffa7a // .long L18_0_set_68 + LONG $0xfffffaf1 // .long L18_0_set_72 + LONG $0xfffffb07 // .long L18_0_set_74 + LONG $0xfffffb20 // .long L18_0_set_76 + + // .set L18_1_set_113, LBB18_113-LJTI18_1 + // .set L18_1_set_112, LBB18_112-LJTI18_1 + // .set L18_1_set_95, LBB18_95-LJTI18_1 + // .set L18_1_set_97, LBB18_97-LJTI18_1 + // .set L18_1_set_81, LBB18_81-LJTI18_1 + // .set L18_1_set_99, LBB18_99-LJTI18_1 + // .set L18_1_set_101, LBB18_101-LJTI18_1 + // .set L18_1_set_104, LBB18_104-LJTI18_1 + // .set L18_1_set_106, LBB18_106-LJTI18_1 + // .set L18_1_set_108, LBB18_108-LJTI18_1 +LJTI18_1: + LONG $0xffffff28 // .long L18_1_set_113 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long 
L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffccc // .long L18_1_set_95 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffce5 // .long L18_1_set_97 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffb5c // .long L18_1_set_81 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffd51 // .long L18_1_set_99 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffd76 // .long L18_1_set_101 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // 
.long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffdb1 // .long L18_1_set_104 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffddb // .long L18_1_set_106 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffed6 // .long L18_1_set_112 + LONG $0xfffffe05 // .long L18_1_set_108 _skip_array: BYTE $0x55 // pushq %rbp @@ -6830,18 +7084,18 @@ _skip_string: MOVQ 0(SI), BX LEAQ -32(BP), DX MOVQ BX, SI - LONG $0xffedc6e8; BYTE $0xff // callq _advance_string + LONG $0xffe971e8; BYTE $0xff // callq _advance_string TESTQ AX, AX - JS LBB22_2 - ADDQ $-1, BX + JS LBB21_2 + DECQ BX MOVQ AX, CX MOVQ BX, AX - JMP LBB22_3 + JMP LBB21_3 -LBB22_2: +LBB21_2: MOVQ 8(R15), CX -LBB22_3: +LBB21_3: MOVQ CX, 0(R14) ADDQ $8, SP BYTE $0x5b // popq %rbx @@ -6850,15 +7104,15 @@ LBB22_3: BYTE $0x5d // popq %rbp RET -LCPI23_0: +LCPI22_0: QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' -LCPI23_1: +LCPI22_1: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -LCPI23_2: +LCPI22_2: QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' @@ -6870,143 +7124,152 @@ _validate_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $24, SP + SUBQ $40, SP MOVQ SI, R14 MOVQ 0(SI), R15 - MOVQ 8(DI), R8 - MOVQ R8, R13 - SUBQ R15, R13 - JE LBB23_52 - MOVQ 0(DI), R12 - LEAQ 0(R12)(R15*1), DI + MOVQ 8(DI), R12 + MOVQ R12, -64(BP) + SUBQ R15, R12 + JE LBB22_18 + MOVQ R14, -48(BP) + MOVQ 0(DI), DI + LEAQ 0(DI)(R15*1), SI + CMPQ R12, $64 + MOVQ SI, -72(BP) + JB LBB22_33 + MOVL R12, R9 + ANDL $63, R9 MOVQ $-1, AX - XORL R11, R11 - CMPQ R13, $64 - JB LBB23_2 - QUAD $0xffffff56056ffec5 // vmovdqu $-170(%rip), %ymm0 /* LCPI23_0(%rip) */ - QUAD $0xffffff6e0d6ffec5 // vmovdqu $-146(%rip), %ymm1 /* LCPI23_1(%rip) */ - QUAD $0xffffff86156ffec5 // vmovdqu $-122(%rip), %ymm2 /* LCPI23_2(%rip) */ + XORL R13, R13 + QUAD $0xffffff46056ffec5 // vmovdqu $-186(%rip), %ymm0 /* LCPI22_0(%rip) */ + QUAD $0xffffff5e0d6ffec5 // vmovdqu $-162(%rip), %ymm1 /* LCPI22_1(%rip) */ + QUAD $0xffffff76156ffec5 // vmovdqu $-138(%rip), %ymm2 /* LCPI22_2(%rip) */ LONG $0xdb76e5c5 // vpcmpeqd %ymm3, %ymm3, %ymm3 - MOVQ R15, DX -LBB23_4: - LONG $0x6f7ec1c4; WORD $0x1424 // vmovdqu (%r12,%rdx), %ymm4 - LONG $0x6f7ec1c4; WORD $0x146c; BYTE $0x20 // vmovdqu $32(%r12,%rdx), %ymm5 - LONG $0xf074d5c5 // vpcmpeqb %ymm0, %ymm5, %ymm6 - LONG $0xded7fdc5 // vpmovmskb %ymm6, %ebx - LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 - LONG $0xd6d77dc5 // vpmovmskb %ymm6, %r10d - LONG $0xf174d5c5 // vpcmpeqb %ymm1, %ymm5, %ymm6 - LONG $0xf6d7fdc5 // vpmovmskb %ymm6, %esi +LBB22_3: + LONG $0x266ffec5 // vmovdqu (%rsi), %ymm4 + LONG $0x6e6ffec5; BYTE $0x20 // vmovdqu $32(%rsi), %ymm5 + LONG $0xf074ddc5 // vpcmpeqb %ymm0, 
%ymm4, %ymm6 + LONG $0xc6d77dc5 // vpmovmskb %ymm6, %r8d + LONG $0xf074d5c5 // vpcmpeqb %ymm0, %ymm5, %ymm6 + LONG $0xded7fdc5 // vpmovmskb %ymm6, %ebx + LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 + LONG $0xded77dc5 // vpmovmskb %ymm6, %r11d + LONG $0xf174d5c5 // vpcmpeqb %ymm1, %ymm5, %ymm6 + LONG $0xf6d77dc5 // vpmovmskb %ymm6, %r14d + LONG $0xf464edc5 // vpcmpgtb %ymm4, %ymm2, %ymm6 + LONG $0xe364ddc5 // vpcmpgtb %ymm3, %ymm4, %ymm4 + LONG $0xe6dbddc5 // vpand %ymm6, %ymm4, %ymm4 + LONG $0xd4d77dc5 // vpmovmskb %ymm4, %r10d + LONG $0xe564edc5 // vpcmpgtb %ymm5, %ymm2, %ymm4 + LONG $0xeb64d5c5 // vpcmpgtb %ymm3, %ymm5, %ymm5 + LONG $0xe4dbd5c5 // vpand %ymm4, %ymm5, %ymm4 + LONG $0xd4d7fdc5 // vpmovmskb %ymm4, %edx SHLQ $32, BX - SHLQ $32, SI - ORQ SI, R10 + SHLQ $32, R14 + ORQ R14, R11 CMPQ AX, $-1 - JNE LBB23_7 - TESTQ R10, R10 - JNE LBB23_6 + JNE LBB22_5 + TESTQ R11, R11 + JNE LBB22_10 -LBB23_7: - LONG $0xf564edc5 // vpcmpgtb %ymm5, %ymm2, %ymm6 - LONG $0xeb64d5c5 // vpcmpgtb %ymm3, %ymm5, %ymm5 - LONG $0xeddbcdc5 // vpand %ymm5, %ymm6, %ymm5 - LONG $0xf5d7fdc5 // vpmovmskb %ymm5, %esi - SHLQ $32, SI - LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 - LONG $0xcdd77dc5 // vpmovmskb %ymm5, %r9d - ORQ R9, BX - MOVQ R10, CX - ORQ R11, CX - JNE LBB23_8 - -LBB23_9: - LONG $0xec64edc5 // vpcmpgtb %ymm4, %ymm2, %ymm5 - LONG $0xe364ddc5 // vpcmpgtb %ymm3, %ymm4, %ymm4 - LONG $0xe4dbd5c5 // vpand %ymm4, %ymm5, %ymm4 - LONG $0xccd7fdc5 // vpmovmskb %ymm4, %ecx - ORQ CX, SI +LBB22_5: + SHLQ $32, DX + ORQ R8, BX + MOVQ R11, CX + ORQ R13, CX + JNE LBB22_9 + ORQ R10, DX TESTQ BX, BX - JNE LBB23_10 - TESTQ SI, SI - JNE LBB23_14 - ADDQ $-64, R13 - ADDQ $64, DX - CMPQ R13, $63 - JA LBB23_4 - JMP LBB23_17 + JNE LBB22_11 -LBB23_8: - MOVQ R11, CX - NOTQ CX - ANDQ R10, CX - MOVQ CX, -56(BP) - LEAQ 0(CX)(CX*1), R9 - ORQ R11, R9 - MOVQ R9, -48(BP) - NOTQ R9 - ANDQ R10, R9 +LBB22_7: + TESTQ DX, DX + JNE LBB22_21 + ADDQ $64, SI + ADDQ $-64, R12 + CMPQ R12, $63 + JA LBB22_3 + JMP LBB22_23 + +LBB22_9: + MOVQ R13, R14 + NOTQ R14 + ANDQ R11, R14 + LEAQ 0(R14)(R14*1), R8 + ORQ R13, R8 + MOVQ R8, -56(BP) + NOTQ R8 + ANDQ R11, R8 MOVQ $-6148914691236517206, CX - ANDQ CX, R9 - XORL R11, R11 - ADDQ -56(BP), R9 - SETCS R11 - ADDQ R9, R9 + ANDQ CX, R8 + XORL R13, R13 + ADDQ R14, R8 + SETCS R13 + ADDQ R8, R8 MOVQ $6148914691236517205, CX - XORQ CX, R9 - ANDQ -48(BP), R9 - NOTQ R9 - ANDQ R9, BX - JMP LBB23_9 + XORQ CX, R8 + ANDQ -56(BP), R8 + NOTQ R8 + ANDQ R8, BX + ORQ R10, DX + TESTQ BX, BX + JE LBB22_7 + JMP LBB22_11 -LBB23_6: - BSFQ R10, AX - ADDQ DX, AX - JMP LBB23_7 +LBB22_10: + MOVQ SI, R14 + SUBQ DI, R14 + BSFQ R11, AX + ADDQ R14, AX + JMP LBB22_5 -LBB23_10: +LBB22_11: + SUBQ DI, SI BSFQ BX, BX - LEAQ 0(BX)(DX*1), R13 - ADDQ $1, R13 - TESTQ SI, SI - JE LBB23_50 - BSFQ SI, CX + LEAQ 1(SI)(BX*1), R12 + TESTQ DX, DX + MOVQ -48(BP), R14 + JE LBB22_13 + BSFQ DX, CX CMPQ CX, BX - JBE LBB23_12 + JBE LBB22_29 -LBB23_50: - TESTQ R13, R13 - JS LBB23_51 +LBB22_13: + TESTQ R12, R12 + JS LBB22_17 MOVQ R15, SI NOTQ SI - ADDQ R13, SI + ADDQ R12, SI + MOVQ -72(BP), DI WORD $0xf8c5; BYTE $0x77 // vzeroupper LONG $0x00026ee8; BYTE $0x00 // callq _utf8_validate TESTQ AX, AX - JS LBB23_54 + JS LBB22_19 ADDQ 0(R14), AX -LBB23_56: - MOVQ $-2, R13 - JMP LBB23_57 +LBB22_16: + MOVQ $-2, R12 + JMP LBB22_20 -LBB23_51: - CMPQ R13, $-1 - JNE LBB23_57 +LBB22_17: + CMPQ R12, $-1 + JNE LBB22_20 -LBB23_52: - MOVQ $-1, R13 - MOVQ R8, AX - JMP LBB23_57 +LBB22_18: + MOVQ $-1, R12 + MOVQ -64(BP), AX + JMP LBB22_20 -LBB23_54: - ADDQ $-1, R15 - 
MOVQ R13, AX - MOVQ R15, R13 +LBB22_19: + DECQ R15 + MOVQ R12, AX + MOVQ R15, R12 -LBB23_57: +LBB22_20: MOVQ AX, 0(R14) - MOVQ R13, AX - ADDQ $24, SP + MOVQ R12, AX + ADDQ $40, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -7016,191 +7279,195 @@ LBB23_57: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB23_14: - MOVQ $-2, R13 +LBB22_21: + MOVQ $-2, R12 CMPQ AX, $-1 - JNE LBB23_57 - BSFQ SI, AX - ADDQ DX, AX - JMP LBB23_57 + JE LBB22_30 -LBB23_17: - ADDQ R12, DX - CMPQ R13, $32 - JB LBB23_32 +LBB22_22: + MOVQ -48(BP), R14 + JMP LBB22_20 + +LBB22_23: + MOVQ R9, R12 + CMPQ R12, $32 + JB LBB22_39 -LBB23_19: - LONG $0x026ffec5 // vmovdqu (%rdx), %ymm0 - QUAD $0xfffffd940d74fdc5 // vpcmpeqb $-620(%rip), %ymm0, %ymm1 /* LCPI23_0(%rip) */ +LBB22_24: + LONG $0x066ffec5 // vmovdqu (%rsi), %ymm0 + QUAD $0xfffffd7c0d74fdc5 // vpcmpeqb $-644(%rip), %ymm0, %ymm1 /* LCPI22_0(%rip) */ LONG $0xd9d7fdc5 // vpmovmskb %ymm1, %ebx - QUAD $0xfffffda80d74fdc5 // vpcmpeqb $-600(%rip), %ymm0, %ymm1 /* LCPI23_1(%rip) */ - LONG $0xc9d7fdc5 // vpmovmskb %ymm1, %ecx - QUAD $0xfffffdbc0d6ffec5 // vmovdqu $-580(%rip), %ymm1 /* LCPI23_2(%rip) */ + QUAD $0xfffffd900d74fdc5 // vpcmpeqb $-624(%rip), %ymm0, %ymm1 /* LCPI22_1(%rip) */ + LONG $0xd1d7fdc5 // vpmovmskb %ymm1, %edx + QUAD $0xfffffda40d6ffec5 // vmovdqu $-604(%rip), %ymm1 /* LCPI22_2(%rip) */ LONG $0xc864f5c5 // vpcmpgtb %ymm0, %ymm1, %ymm1 LONG $0xd276edc5 // vpcmpeqd %ymm2, %ymm2, %ymm2 LONG $0xc264fdc5 // vpcmpgtb %ymm2, %ymm0, %ymm0 - LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 - LONG $0xc8d77dc5 // vpmovmskb %ymm0, %r9d - TESTL CX, CX - JNE LBB23_20 - TESTQ R11, R11 - JNE LBB23_22 - XORL R11, R11 + LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 + LONG $0xc0d77dc5 // vpmovmskb %ymm0, %r8d + TESTL DX, DX + JNE LBB22_34 + TESTQ R13, R13 + JNE LBB22_36 + XORL R13, R13 TESTQ BX, BX - JE LBB23_28 + JE LBB22_37 -LBB23_26: - SUBQ R12, DX - BSFQ BX, SI - LEAQ 0(DX)(SI*1), R13 - ADDQ $1, R13 - TESTL R9, R9 - JE LBB23_50 - BSFQ R9, CX - CMPQ CX, SI - JA LBB23_50 +LBB22_27: + SUBQ DI, SI + BSFQ BX, DX + LEAQ 1(SI)(DX*1), R12 + TESTL R8, R8 + JE LBB22_32 + BSFQ R8, CX + CMPQ CX, DX + MOVQ -48(BP), R14 + JA LBB22_13 -LBB23_12: - ADDQ DX, CX +LBB22_29: + ADDQ SI, CX CMPQ AX, $-1 LONG $0xc1440f48 // cmoveq %rcx, %rax - JMP LBB23_56 + JMP LBB22_16 -LBB23_2: - MOVQ DI, DX - CMPQ R13, $32 - JAE LBB23_19 - JMP LBB23_32 +LBB22_30: + SUBQ DI, SI + BSFQ DX, AX -LBB23_20: - CMPQ AX, $-1 - JNE LBB23_22 - MOVQ DX, SI - SUBQ R12, SI - BSFQ CX, AX +LBB22_31: ADDQ SI, AX + MOVQ -48(BP), R14 + JMP LBB22_20 -LBB23_22: - MOVL R11, SI - NOTL SI - ANDL CX, SI - MOVQ SI, -48(BP) - ADDL SI, SI - ORL R11, SI - MOVL SI, R10 - NOTL R10 - ANDL CX, R10 - ANDL $-1431655766, R10 - XORL R11, R11 - ADDL -48(BP), R10 - SETCS R11 - ADDL R10, R10 - XORL $1431655765, R10 - ANDL SI, R10 +LBB22_32: + MOVQ -48(BP), R14 + JMP LBB22_13 + +LBB22_33: + MOVQ $-1, AX + XORL R13, R13 + CMPQ R12, $32 + JAE LBB22_24 + JMP LBB22_39 + +LBB22_34: + CMPQ AX, $-1 + JNE LBB22_36 + MOVQ SI, CX + SUBQ DI, CX + BSFQ DX, AX + ADDQ CX, AX + +LBB22_36: + MOVL R13, R10 NOTL R10 - ANDL R10, BX + ANDL DX, R10 + LEAL 0(R10)(R10*1), R9 + ORL R13, R9 + MOVL R9, CX + NOTL CX + ANDL DX, CX + ANDL $-1431655766, CX + XORL R13, R13 + ADDL R10, CX + SETCS R13 + ADDL CX, CX + XORL $1431655765, CX + ANDL R9, CX + NOTL CX + ANDL CX, BX TESTQ BX, BX - JNE LBB23_26 + JNE LBB22_27 -LBB23_28: - TESTL R9, R9 - JNE LBB23_29 - ADDQ $32, DX - ADDQ $-32, R13 +LBB22_37: + TESTL R8, R8 + JNE LBB22_51 + ADDQ $32, SI + ADDQ 
$-32, R12 -LBB23_32: - TESTQ R11, R11 - JNE LBB23_33 +LBB22_39: TESTQ R13, R13 - JE LBB23_49 - -LBB23_36: - MOVQ R12, R9 - NOTQ R9 - MOVQ R12, SI - NEGQ SI + JNE LBB22_53 + MOVQ -48(BP), R14 + TESTQ R12, R12 + JE LBB22_50 -LBB23_37: - XORL CX, CX +LBB22_41: + MOVQ DI, R8 + NOTQ R8 -LBB23_38: - MOVBLZX 0(DX)(CX*1), BX +LBB22_42: + LEAQ 1(SI), DX + MOVBLZX 0(SI), BX CMPB BX, $34 - JE LBB23_48 + JE LBB22_49 + LEAQ -1(R12), R10 CMPB BX, $92 - JE LBB23_40 + JE LBB22_46 CMPB BX, $31 - JBE LBB23_43 - ADDQ $1, CX - CMPQ R13, CX - JNE LBB23_38 - JMP LBB23_46 - -LBB23_40: - LEAQ -1(R13), BX - CMPQ BX, CX - JE LBB23_52 - LEAQ 0(SI)(DX*1), BX - ADDQ CX, BX - CMPQ AX, $-1 - LONG $0xc3440f48 // cmoveq %rbx, %rax - ADDQ CX, DX - ADDQ $2, DX - MOVQ R13, BX - SUBQ CX, BX - ADDQ $-2, BX - ADDQ $-2, R13 - CMPQ R13, CX - MOVQ BX, R13 - JNE LBB23_37 - JMP LBB23_52 - -LBB23_48: - ADDQ CX, DX - ADDQ $1, DX + JBE LBB22_55 + MOVQ DX, SI + MOVQ R10, R12 + TESTQ R10, R10 + JNE LBB22_42 + JMP LBB22_48 -LBB23_49: - SUBQ R12, DX - MOVQ DX, R13 - JMP LBB23_50 +LBB22_46: + TESTQ R10, R10 + JE LBB22_18 + ADDQ R8, DX + CMPQ AX, $-1 + LONG $0xc2440f48 // cmoveq %rdx, %rax + ADDQ $2, SI + ADDQ $-2, R12 + MOVQ R12, R10 + TESTQ R10, R10 + JNE LBB22_42 -LBB23_46: +LBB22_48: CMPB BX, $34 - JNE LBB23_52 - ADDQ R13, DX - JMP LBB23_49 + JNE LBB22_18 + JMP LBB22_50 -LBB23_29: - MOVQ $-2, R13 +LBB22_49: + MOVQ DX, SI + +LBB22_50: + SUBQ DI, SI + MOVQ SI, R12 + JMP LBB22_13 + +LBB22_51: + MOVQ $-2, R12 CMPQ AX, $-1 - JNE LBB23_57 - SUBQ R12, DX - BSFQ R9, AX - ADDQ DX, AX - JMP LBB23_57 + JNE LBB22_22 + SUBQ DI, SI + BSFQ R8, AX + JMP LBB22_31 -LBB23_33: - TESTQ R13, R13 - JE LBB23_52 - MOVQ R12, CX +LBB22_53: + TESTQ R12, R12 + MOVQ -48(BP), R14 + JE LBB22_18 + MOVQ DI, CX NOTQ CX - ADDQ DX, CX + ADDQ SI, CX CMPQ AX, $-1 - LONG $0xc1440f48 // cmoveq %rcx, %rax - ADDQ $1, DX - ADDQ $-1, R13 - TESTQ R13, R13 - JNE LBB23_36 - JMP LBB23_49 + LONG $0xc1440f48 // cmoveq %rcx, %rax + INCQ SI + DECQ R12 + TESTQ R12, R12 + JNE LBB22_41 + JMP LBB22_50 -LBB23_43: - MOVQ $-2, R13 +LBB22_55: + MOVQ $-2, R12 CMPQ AX, $-1 - JNE LBB23_57 - ADDQ DX, R9 - LEAQ 0(CX)(R9*1), AX - ADDQ $1, AX - JMP LBB23_57 + JNE LBB22_22 + ADDQ R8, DX + MOVQ DX, AX + MOVQ -48(BP), R14 + JMP LBB22_20 _utf8_validate: BYTE $0x55 // pushq %rbp @@ -7210,151 +7477,143 @@ _utf8_validate: BYTE $0x53 // pushq %rbx MOVQ $-1, AX TESTQ SI, SI - JLE LBB24_34 - LONG $0xfa0d8d4c; WORD $0x00b0; BYTE $0x00 // leaq $45306(%rip), %r9 /* _first(%rip) */ - LONG $0xf3058d4c; WORD $0x00b1; BYTE $0x00 // leaq $45555(%rip), %r8 /* _ranges(%rip) */ - LONG $0x91158d4c; WORD $0x0001; BYTE $0x00 // leaq $401(%rip), %r10 /* LJTI24_0(%rip) */ - MOVQ DI, R14 + JLE LBB23_33 + LONG $0xd60d8d4c; WORD $0x00b0; BYTE $0x00 // leaq $45270(%rip), %r9 /* _first(%rip) */ + LONG $0xcf058d4c; WORD $0x00b1; BYTE $0x00 // leaq $45519(%rip), %r8 /* _ranges(%rip) */ + LONG $0x4e158d4c; WORD $0x0001; BYTE $0x00 // leaq $334(%rip), %r10 /* LJTI23_0(%rip) */ + MOVQ DI, R11 -LBB24_2: - CMPB 0(R14), $0 - JS LBB24_3 - MOVQ SI, R11 - MOVQ R14, CX +LBB23_2: + CMPB 0(R11), $0 + JS LBB23_3 + MOVQ SI, DX + MOVQ R11, CX CMPQ SI, $32 - JL LBB24_15 - XORL BX, BX - XORL DX, DX + JL LBB23_10 + MOVQ R11, CX + MOVQ SI, DX -LBB24_6: - LONG $0x6f7ec1c4; WORD $0x1e04 // vmovdqu (%r14,%rbx), %ymm0 - LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx - TESTL CX, CX - JNE LBB24_7 - ADDQ $32, BX - LEAQ 0(SI)(DX*1), CX - ADDQ $-32, CX - ADDQ $-32, DX +LBB23_6: + LONG $0x016ffec5 // vmovdqu (%rcx), %ymm0 + LONG $0xd8d7fdc5 // vpmovmskb %ymm0, %ebx + 
TESTL BX, BX + JNE LBB23_7 ADDQ $32, CX - CMPQ CX, $63 - JG LBB24_6 - MOVQ R14, CX - SUBQ DX, CX - MOVQ SI, R11 - SUBQ BX, R11 + CMPQ DX, $63 + LEAQ -32(DX), DX + JG LBB23_6 -LBB24_15: +LBB23_10: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ R11, $16 - JL LBB24_16 - MOVQ R14, BX - SUBQ CX, BX - -LBB24_22: - LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 - LONG $0xd0d7f9c5 // vpmovmskb %xmm0, %edx - TESTW DX, DX - JNE LBB24_23 - LEAQ -16(R11), DX + CMPQ DX, $16 + JL LBB23_11 + +LBB23_16: + LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + TESTW BX, BX + JNE LBB23_17 ADDQ $16, CX - ADDQ $-16, BX - CMPQ R11, $31 - MOVQ DX, R11 - JG LBB24_22 - JMP LBB24_17 + CMPQ DX, $31 + LEAQ -16(DX), DX + JG LBB23_16 -LBB24_3: - XORL R15, R15 +LBB23_11: + TESTQ DX, DX + JLE LBB23_33 + INCQ DX + +LBB23_13: + CMPB 0(CX), $0 + JS LBB23_18 + INCQ CX + DECQ DX + CMPQ DX, $1 + JG LBB23_13 + JMP LBB23_33 + +LBB23_3: + XORL DX, DX + CMPQ DX, $-1 + JNE LBB23_20 + JMP LBB23_33 -LBB24_8: - CMPQ R15, $-1 - JE LBB24_34 - SUBQ R15, SI - JLE LBB24_34 - LEAQ 0(R14)(R15*1), R11 - MOVBLZX 0(R14)(R15*1), R14 - MOVBLZX 0(R14)(R9*1), R15 +LBB23_18: + SUBQ R11, CX + MOVQ CX, DX + CMPQ DX, $-1 + JE LBB23_33 + +LBB23_20: + SUBQ DX, SI + JLE LBB23_33 + LEAQ 0(R11)(DX*1), R14 + MOVBLZX 0(R11)(DX*1), R11 + MOVBLZX 0(R11)(R9*1), R15 MOVL R15, DX ANDL $7, DX CMPQ SI, DX - JB LBB24_32 + JB LBB23_31 CMPB DX, $4 - JA LBB24_32 + JA LBB23_31 MOVL $1, BX MOVBLZX DX, CX MOVLQSX 0(R10)(CX*4), CX ADDQ R10, CX JMP CX -LBB24_25: - MOVB 3(R11), BX +LBB23_24: + MOVB 3(R14), BX TESTB BX, BX - JNS LBB24_32 + JNS LBB23_31 CMPB BX, $-65 - JA LBB24_32 + JA LBB23_31 -LBB24_27: - MOVB 2(R11), BX +LBB23_26: + MOVB 2(R14), BX TESTB BX, BX - JNS LBB24_32 + JNS LBB23_31 CMPB BX, $-65 - JA LBB24_32 + JA LBB23_31 -LBB24_29: - TESTB R14, R14 - JNS LBB24_32 +LBB23_28: + TESTB R11, R11 + JNS LBB23_31 SHRQ $4, R15 - MOVB 1(R11), R14 - CMPB R14, 0(R8)(R15*2) - JB LBB24_32 + MOVB 1(R14), R11 + CMPB R11, 0(R8)(R15*2) + JB LBB23_31 MOVQ DX, BX - CMPB 1(R8)(R15*2), R14 - JB LBB24_32 + CMPB 1(R8)(R15*2), R11 + JB LBB23_31 -LBB24_33: - ADDQ BX, R11 - MOVQ R11, R14 +LBB23_32: + ADDQ BX, R14 + MOVQ R14, R11 SUBQ BX, SI - JG LBB24_2 - JMP LBB24_34 + JG LBB23_2 + JMP LBB23_33 -LBB24_16: - MOVQ R11, DX - -LBB24_17: - TESTQ DX, DX - JLE LBB24_34 - ADDQ $1, DX - MOVQ CX, R15 - SUBQ R14, R15 - -LBB24_19: - CMPB 0(CX), $0 - JS LBB24_8 - ADDQ $1, CX - ADDQ $-1, DX - ADDQ $1, R15 - CMPQ DX, $1 - JG LBB24_19 - JMP LBB24_34 +LBB23_7: + MOVLQSX BX, DX + JMP LBB23_8 -LBB24_7: - MOVLQSX CX, CX - BSFQ CX, R15 - SUBQ DX, R15 - JMP LBB24_8 +LBB23_17: + MOVWLZX BX, DX -LBB24_23: - MOVWLZX DX, CX - BSFQ CX, R15 - SUBQ BX, R15 - JMP LBB24_8 +LBB23_8: + SUBQ R11, CX + BSFQ DX, DX + ADDQ CX, DX + CMPQ DX, $-1 + JNE LBB23_20 + JMP LBB23_33 -LBB24_32: - SUBQ DI, R11 - MOVQ R11, AX +LBB23_31: + SUBQ DI, R14 + MOVQ R14, AX -LBB24_34: +LBB23_33: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 @@ -7362,17 +7621,17 @@ LBB24_34: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -// .set L24_0_set_33, LBB24_33-LJTI24_0 -// .set L24_0_set_32, LBB24_32-LJTI24_0 -// .set L24_0_set_29, LBB24_29-LJTI24_0 -// .set L24_0_set_27, LBB24_27-LJTI24_0 -// .set L24_0_set_25, LBB24_25-LJTI24_0 -LJTI24_0: - LONG $0xffffff92 // .long L24_0_set_33 - LONG $0xfffffff0 // .long L24_0_set_32 - LONG $0xffffff75 // .long L24_0_set_29 - LONG $0xffffff64 // .long L24_0_set_27 - LONG $0xffffff4f // .long L24_0_set_25 +// .set L23_0_set_32, LBB23_32-LJTI23_0 +// .set 
L23_0_set_31, LBB23_31-LJTI23_0 +// .set L23_0_set_28, LBB23_28-LJTI23_0 +// .set L23_0_set_26, LBB23_26-LJTI23_0 +// .set L23_0_set_24, LBB23_24-LJTI23_0 +LJTI23_0: + LONG $0xffffffc1 // .long L23_0_set_32 + LONG $0xfffffff0 // .long L23_0_set_31 + LONG $0xffffffa4 // .long L23_0_set_28 + LONG $0xffffff97 // .long L23_0_set_26 + LONG $0xffffff8a // .long L23_0_set_24 _skip_negative: BYTE $0x55 // pushq %rbp @@ -7386,73 +7645,76 @@ _skip_negative: MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0x000173e8; BYTE $0x00 // callq _do_skip_number + LONG $0x00017be8; BYTE $0x00 // callq _do_skip_number + MOVQ 0(R14), CX TESTQ AX, AX - JS LBB25_1 - ADDQ AX, 0(R14) - ADDQ $-1, BX - JMP LBB25_3 + JS LBB24_1 + ADDQ AX, CX + MOVQ CX, 0(R14) + DECQ BX + JMP LBB24_3 -LBB25_1: +LBB24_1: NOTQ AX - ADDQ AX, 0(R14) + ADDQ AX, CX + MOVQ CX, 0(R14) MOVQ $-2, BX -LBB25_3: +LBB24_3: MOVQ BX, AX BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LCPI26_0: +LCPI25_0: QUAD $0x2f2f2f2f2f2f2f2f; QUAD $0x2f2f2f2f2f2f2f2f // .space 16, '////////////////' QUAD $0x2f2f2f2f2f2f2f2f; QUAD $0x2f2f2f2f2f2f2f2f // .space 16, '////////////////' -LCPI26_1: +LCPI25_1: QUAD $0x3a3a3a3a3a3a3a3a; QUAD $0x3a3a3a3a3a3a3a3a // .space 16, '::::::::::::::::' QUAD $0x3a3a3a3a3a3a3a3a; QUAD $0x3a3a3a3a3a3a3a3a // .space 16, '::::::::::::::::' -LCPI26_2: +LCPI25_2: QUAD $0x2b2b2b2b2b2b2b2b; QUAD $0x2b2b2b2b2b2b2b2b // .space 16, '++++++++++++++++' QUAD $0x2b2b2b2b2b2b2b2b; QUAD $0x2b2b2b2b2b2b2b2b // .space 16, '++++++++++++++++' -LCPI26_3: +LCPI25_3: QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' -LCPI26_4: - QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' - QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' +LCPI25_4: + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' -LCPI26_5: +LCPI25_5: QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' -LCPI26_6: - QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' - QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' +LCPI25_6: + QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' + QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' -LCPI26_7: +LCPI25_7: QUAD $0x2f2f2f2f2f2f2f2f; QUAD $0x2f2f2f2f2f2f2f2f // .space 16, '////////////////' -LCPI26_8: +LCPI25_8: QUAD $0x3a3a3a3a3a3a3a3a; QUAD $0x3a3a3a3a3a3a3a3a // .space 16, '::::::::::::::::' -LCPI26_9: +LCPI25_9: QUAD $0x2b2b2b2b2b2b2b2b; QUAD $0x2b2b2b2b2b2b2b2b // .space 16, '++++++++++++++++' -LCPI26_10: +LCPI25_10: QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' -LCPI26_11: - QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' +LCPI25_11: + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' -LCPI26_12: +LCPI25_12: QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' 
-LCPI26_13: - QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' +LCPI25_13: + QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' _do_skip_number: BYTE $0x55 // pushq %rbp @@ -7463,372 +7725,360 @@ _do_skip_number: WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx TESTQ SI, SI - JE LBB26_1 + JE LBB25_53 CMPB 0(DI), $48 - JNE LBB26_6 - MOVL $1, AX + JNE LBB25_5 + MOVL $1, DX CMPQ SI, $1 - JE LBB26_80 - MOVB 1(DI), CX - ADDB $-46, CX - CMPB CX, $55 - JA LBB26_80 - MOVBLZX CX, CX - MOVQ $36028797027352577, DX - BTQ CX, DX - JAE LBB26_80 + JE LBB25_73 + MOVB 1(DI), AX + ADDB $-46, AX + CMPB AX, $55 + JA LBB25_73 + MOVBLZX AX, AX + MOVQ $36028797027352577, CX + BTQ AX, CX + JAE LBB25_73 -LBB26_6: - MOVQ $-1, R10 +LBB25_5: CMPQ SI, $32 - JB LBB26_7 - XORL AX, AX - QUAD $0xfffffe45056ffec5 // vmovdqu $-443(%rip), %ymm0 /* LCPI26_0(%rip) */ - QUAD $0xfffffe5d0d6ffec5 // vmovdqu $-419(%rip), %ymm1 /* LCPI26_1(%rip) */ - QUAD $0xfffffe75156ffec5 // vmovdqu $-395(%rip), %ymm2 /* LCPI26_2(%rip) */ - QUAD $0xfffffe8d1d6ffec5 // vmovdqu $-371(%rip), %ymm3 /* LCPI26_3(%rip) */ - QUAD $0xfffffea5256ffec5 // vmovdqu $-347(%rip), %ymm4 /* LCPI26_4(%rip) */ - QUAD $0xfffffebd2d6ffec5 // vmovdqu $-323(%rip), %ymm5 /* LCPI26_5(%rip) */ - QUAD $0xfffffed5356ffec5 // vmovdqu $-299(%rip), %ymm6 /* LCPI26_6(%rip) */ - MOVQ $-1, R8 + JB LBB25_76 + LEAQ -32(SI), R11 + MOVQ R11, AX + ANDQ $-32, AX + LEAQ 32(AX)(DI*1), R10 + ANDL $31, R11 MOVQ $-1, R9 + QUAD $0xfffffe35056ffec5 // vmovdqu $-459(%rip), %ymm0 /* LCPI25_0(%rip) */ + QUAD $0xfffffe4d0d6ffec5 // vmovdqu $-435(%rip), %ymm1 /* LCPI25_1(%rip) */ + QUAD $0xfffffe65156ffec5 // vmovdqu $-411(%rip), %ymm2 /* LCPI25_2(%rip) */ + QUAD $0xfffffe7d1d6ffec5 // vmovdqu $-387(%rip), %ymm3 /* LCPI25_3(%rip) */ + QUAD $0xfffffe95256ffec5 // vmovdqu $-363(%rip), %ymm4 /* LCPI25_4(%rip) */ + QUAD $0xfffffead2d6ffec5 // vmovdqu $-339(%rip), %ymm5 /* LCPI25_5(%rip) */ + QUAD $0xfffffec5356ffec5 // vmovdqu $-315(%rip), %ymm6 /* LCPI25_6(%rip) */ + MOVQ $-1, AX + MOVQ $-1, R8 + MOVQ DI, R14 -LBB26_9: - LONG $0x3c6ffec5; BYTE $0x07 // vmovdqu (%rdi,%rax), %ymm7 +LBB25_7: + LONG $0x6f7ec1c4; BYTE $0x3e // vmovdqu (%r14), %ymm7 LONG $0xc06445c5 // vpcmpgtb %ymm0, %ymm7, %ymm8 LONG $0xcf6475c5 // vpcmpgtb %ymm7, %ymm1, %ymm9 - LONG $0xdb3541c4; BYTE $0xc0 // vpand %ymm8, %ymm9, %ymm8 + LONG $0xdb3d41c4; BYTE $0xc1 // vpand %ymm9, %ymm8, %ymm8 LONG $0xca7445c5 // vpcmpeqb %ymm2, %ymm7, %ymm9 LONG $0xd37445c5 // vpcmpeqb %ymm3, %ymm7, %ymm10 LONG $0xeb2d41c4; BYTE $0xc9 // vpor %ymm9, %ymm10, %ymm9 - LONG $0xd4db45c5 // vpand %ymm4, %ymm7, %ymm10 + LONG $0xd4eb45c5 // vpor %ymm4, %ymm7, %ymm10 LONG $0xd6742dc5 // vpcmpeqb %ymm6, %ymm10, %ymm10 LONG $0xfd74c5c5 // vpcmpeqb %ymm5, %ymm7, %ymm7 - LONG $0xdfd7fdc5 // vpmovmskb %ymm7, %ebx - LONG $0xd77d41c4; BYTE $0xf2 // vpmovmskb %ymm10, %r14d - LONG $0xd77d41c4; BYTE $0xd9 // vpmovmskb %ymm9, %r11d - LONG $0xffebbdc5 // vpor %ymm7, %ymm8, %ymm7 - LONG $0xeb2d41c4; BYTE $0xc1 // vpor %ymm9, %ymm10, %ymm8 + LONG $0xd7d7fdc5 // vpmovmskb %ymm7, %edx + LONG $0xd77d41c4; BYTE $0xe2 // vpmovmskb %ymm10, %r12d + LONG $0xd77d41c4; BYTE $0xf9 // vpmovmskb %ymm9, %r15d + LONG $0xffebadc5 // vpor %ymm7, %ymm10, %ymm7 + LONG $0xeb3541c4; BYTE $0xc0 // vpor %ymm8, %ymm9, %ymm8 LONG $0xffebbdc5 // vpor %ymm7, %ymm8, %ymm7 LONG $0xcfd7fdc5 // vpmovmskb %ymm7, %ecx NOTQ CX - BSFQ CX, R15 - CMPL R15, $32 - JE LBB26_11 - MOVL $-1, DX - MOVL R15, CX - SHLL CX, DX - NOTL DX - ANDL DX, BX 
- ANDL DX, R14 - ANDL R11, DX - MOVL DX, R11 - -LBB26_11: - LEAL -1(BX), CX - ANDL BX, CX - JNE LBB26_12 - LEAL -1(R14), CX - ANDL R14, CX - JNE LBB26_12 - LEAL -1(R11), CX - ANDL R11, CX - JNE LBB26_12 - TESTL BX, BX - JE LBB26_19 - BSFL BX, CX - CMPQ R9, $-1 - JNE LBB26_81 - ADDQ AX, CX - MOVQ CX, R9 - -LBB26_19: - TESTL R14, R14 - JE LBB26_22 - BSFL R14, CX + BSFQ CX, CX + CMPL CX, $32 + JE LBB25_9 + MOVL $-1, BX + SHLL CX, BX + NOTL BX + ANDL BX, DX + ANDL BX, R12 + ANDL R15, BX + MOVL BX, R15 + +LBB25_9: + LEAL -1(DX), BX + ANDL DX, BX + JNE LBB25_70 + LEAL -1(R12), BX + ANDL R12, BX + JNE LBB25_70 + LEAL -1(R15), BX + ANDL R15, BX + JNE LBB25_70 + TESTL DX, DX + JE LBB25_15 + MOVQ R14, BX + SUBQ DI, BX + BSFL DX, DX + ADDQ BX, DX CMPQ R8, $-1 - JNE LBB26_81 - ADDQ AX, CX - MOVQ CX, R8 + JNE LBB25_72 + MOVQ DX, R8 + +LBB25_15: + TESTL R12, R12 + JE LBB25_18 + MOVQ R14, BX + SUBQ DI, BX + BSFL R12, DX + ADDQ BX, DX + CMPQ AX, $-1 + JNE LBB25_72 + MOVQ DX, AX -LBB26_22: - TESTL R11, R11 - JE LBB26_25 - BSFL R11, CX - CMPQ R10, $-1 - JNE LBB26_81 - ADDQ AX, CX - MOVQ CX, R10 +LBB25_18: + TESTL R15, R15 + JE LBB25_21 + MOVQ R14, BX + SUBQ DI, BX + BSFL R15, DX + ADDQ BX, DX + CMPQ R9, $-1 + JNE LBB25_72 + MOVQ DX, R9 -LBB26_25: - CMPL R15, $32 - JNE LBB26_82 +LBB25_21: + CMPL CX, $32 + JNE LBB25_54 + ADDQ $32, R14 ADDQ $-32, SI - ADDQ $32, AX CMPQ SI, $31 - JA LBB26_9 + JA LBB25_7 WORD $0xf8c5; BYTE $0x77 // vzeroupper - ADDQ DI, AX - MOVQ AX, R15 + MOVQ R11, SI CMPQ SI, $16 - JB LBB26_49 + JB LBB25_42 -LBB26_29: - MOVQ R15, R11 - SUBQ DI, R11 - XORL AX, AX - QUAD $0xfffffdcc056f7ac5 // vmovdqu $-564(%rip), %xmm8 /* LCPI26_7(%rip) */ - QUAD $0xfffffdd40d6f7ac5 // vmovdqu $-556(%rip), %xmm9 /* LCPI26_8(%rip) */ - QUAD $0xfffffddc156f7ac5 // vmovdqu $-548(%rip), %xmm10 /* LCPI26_9(%rip) */ - QUAD $0xfffffde41d6f7ac5 // vmovdqu $-540(%rip), %xmm11 /* LCPI26_10(%rip) */ - QUAD $0xfffffdec256ffac5 // vmovdqu $-532(%rip), %xmm4 /* LCPI26_11(%rip) */ - QUAD $0xfffffdf42d6ffac5 // vmovdqu $-524(%rip), %xmm5 /* LCPI26_12(%rip) */ - QUAD $0xfffffdfc356ffac5 // vmovdqu $-516(%rip), %xmm6 /* LCPI26_13(%rip) */ - MOVL $4294967295, R14 - -LBB26_30: - LONG $0x6f7ac1c4; WORD $0x073c // vmovdqu (%r15,%rax), %xmm7 - LONG $0x6441c1c4; BYTE $0xc0 // vpcmpgtb %xmm8, %xmm7, %xmm0 - LONG $0xcf64b1c5 // vpcmpgtb %xmm7, %xmm9, %xmm1 - LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 - LONG $0xcf74a9c5 // vpcmpeqb %xmm7, %xmm10, %xmm1 - LONG $0xd774a1c5 // vpcmpeqb %xmm7, %xmm11, %xmm2 - LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd4dbc1c5 // vpand %xmm4, %xmm7, %xmm2 - LONG $0xd674e9c5 // vpcmpeqb %xmm6, %xmm2, %xmm2 - LONG $0xfd74c1c5 // vpcmpeqb %xmm5, %xmm7, %xmm7 - LONG $0xdfebe9c5 // vpor %xmm7, %xmm2, %xmm3 - LONG $0xc0ebf1c5 // vpor %xmm0, %xmm1, %xmm0 - LONG $0xc0ebe1c5 // vpor %xmm0, %xmm3, %xmm0 - LONG $0xdfd7f9c5 // vpmovmskb %xmm7, %ebx - LONG $0xead779c5 // vpmovmskb %xmm2, %r13d - LONG $0xe1d779c5 // vpmovmskb %xmm1, %r12d - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - XORQ R14, CX +LBB25_24: + LEAQ -16(SI), R14 + MOVQ R14, CX + ANDQ $-16, CX + LEAQ 16(CX)(R10*1), R11 + ANDL $15, R14 + QUAD $0xfffffda2056f7ac5 // vmovdqu $-606(%rip), %xmm8 /* LCPI25_7(%rip) */ + QUAD $0xfffffdaa0d6f7ac5 // vmovdqu $-598(%rip), %xmm9 /* LCPI25_8(%rip) */ + QUAD $0xfffffdb2156f7ac5 // vmovdqu $-590(%rip), %xmm10 /* LCPI25_9(%rip) */ + QUAD $0xfffffdba1d6f7ac5 // vmovdqu $-582(%rip), %xmm11 /* LCPI25_10(%rip) */ + QUAD $0xfffffdc2256ffac5 // vmovdqu $-574(%rip), %xmm4 /* LCPI25_11(%rip) */ + QUAD 
$0xfffffdca2d6ffac5 // vmovdqu $-566(%rip), %xmm5 /* LCPI25_12(%rip) */ + QUAD $0xfffffdd2356ffac5 // vmovdqu $-558(%rip), %xmm6 /* LCPI25_13(%rip) */ + MOVL $4294967295, R15 + +LBB25_25: + LONG $0x6f7ac1c4; BYTE $0x3a // vmovdqu (%r10), %xmm7 + LONG $0x6441c1c4; BYTE $0xc0 // vpcmpgtb %xmm8, %xmm7, %xmm0 + LONG $0xcf64b1c5 // vpcmpgtb %xmm7, %xmm9, %xmm1 + LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 + LONG $0xcf74a9c5 // vpcmpeqb %xmm7, %xmm10, %xmm1 + LONG $0xd774a1c5 // vpcmpeqb %xmm7, %xmm11, %xmm2 + LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 + LONG $0xd4ebc1c5 // vpor %xmm4, %xmm7, %xmm2 + LONG $0xd674e9c5 // vpcmpeqb %xmm6, %xmm2, %xmm2 + LONG $0xfd74c1c5 // vpcmpeqb %xmm5, %xmm7, %xmm7 + LONG $0xdfebe9c5 // vpor %xmm7, %xmm2, %xmm3 + LONG $0xc0ebf1c5 // vpor %xmm0, %xmm1, %xmm0 + LONG $0xc0ebe1c5 // vpor %xmm0, %xmm3, %xmm0 + LONG $0xd7d7f9c5 // vpmovmskb %xmm7, %edx + LONG $0xead779c5 // vpmovmskb %xmm2, %r13d + LONG $0xe1d779c5 // vpmovmskb %xmm1, %r12d + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + XORQ R15, CX BSFQ CX, CX CMPL CX, $16 - JE LBB26_32 - MOVL $-1, DX - SHLL CX, DX - NOTL DX - ANDL DX, BX - ANDL DX, R13 - ANDL R12, DX - MOVL DX, R12 - -LBB26_32: - LEAL -1(BX), DX - ANDL BX, DX - JNE LBB26_33 - LEAL -1(R13), DX - ANDL R13, DX - JNE LBB26_33 - LEAL -1(R12), DX - ANDL R12, DX - JNE LBB26_33 - TESTL BX, BX - JE LBB26_40 - BSFL BX, BX - CMPQ R9, $-1 - JNE LBB26_83 - LEAQ 0(R11)(AX*1), R9 - ADDQ BX, R9 + JE LBB25_27 + MOVL $-1, BX + SHLL CX, BX + NOTL BX + ANDL BX, DX + ANDL BX, R13 + ANDL R12, BX + MOVL BX, R12 + +LBB25_27: + LEAL -1(DX), BX + ANDL DX, BX + JNE LBB25_71 + LEAL -1(R13), BX + ANDL R13, BX + JNE LBB25_71 + LEAL -1(R12), BX + ANDL R12, BX + JNE LBB25_71 + TESTL DX, DX + JE LBB25_33 + MOVQ R10, BX + SUBQ DI, BX + BSFL DX, DX + ADDQ BX, DX + CMPQ R8, $-1 + JNE LBB25_72 + MOVQ DX, R8 -LBB26_40: +LBB25_33: TESTL R13, R13 - JE LBB26_43 + JE LBB25_36 + MOVQ R10, BX + SUBQ DI, BX BSFL R13, DX - CMPQ R8, $-1 - JNE LBB26_84 - LEAQ 0(R11)(AX*1), R8 - ADDQ DX, R8 + ADDQ BX, DX + CMPQ AX, $-1 + JNE LBB25_72 + MOVQ DX, AX -LBB26_43: +LBB25_36: TESTL R12, R12 - JE LBB26_46 + JE LBB25_39 + MOVQ R10, BX + SUBQ DI, BX BSFL R12, DX - CMPQ R10, $-1 - JNE LBB26_84 - LEAQ 0(R11)(AX*1), R10 - ADDQ DX, R10 + ADDQ BX, DX + CMPQ R9, $-1 + JNE LBB25_72 + MOVQ DX, R9 -LBB26_46: +LBB25_39: CMPL CX, $16 - JNE LBB26_85 + JNE LBB25_55 + ADDQ $16, R10 ADDQ $-16, SI - ADDQ $16, AX CMPQ SI, $15 - JA LBB26_30 - ADDQ AX, R15 + JA LBB25_25 + MOVQ R14, SI + MOVQ R11, R10 -LBB26_49: +LBB25_42: TESTQ SI, SI - JE LBB26_66 - LEAQ 0(R15)(SI*1), R11 - MOVQ R15, BX - SUBQ DI, BX - XORL AX, AX - LONG $0x9a358d4c; WORD $0x0001; BYTE $0x00 // leaq $410(%rip), %r14 /* LJTI26_0(%rip) */ - JMP LBB26_51 + JE LBB25_56 + LEAQ 0(R10)(SI*1), R11 + LONG $0x8f1d8d48; WORD $0x0001; BYTE $0x00 // leaq $399(%rip), %rbx /* LJTI25_0(%rip) */ + JMP LBB25_45 + +LBB25_44: + MOVQ CX, R10 + DECQ SI + JE LBB25_75 + +LBB25_45: + MOVBLSX 0(R10), DX + ADDL $-43, DX + CMPL DX, $58 + JA LBB25_56 + LEAQ 1(R10), CX + MOVLQSX 0(BX)(DX*4), DX + ADDQ BX, DX + JMP DX + +LBB25_47: + MOVQ CX, DX + SUBQ DI, DX + CMPQ R9, $-1 + JNE LBB25_79 + DECQ DX + MOVQ DX, R9 + JMP LBB25_44 -LBB26_53: - CMPL CX, $101 - JNE LBB26_65 +LBB25_49: + MOVQ CX, DX + SUBQ DI, DX + CMPQ AX, $-1 + JNE LBB25_79 + DECQ DX + MOVQ DX, AX + JMP LBB25_44 -LBB26_54: +LBB25_51: + MOVQ CX, DX + SUBQ DI, DX CMPQ R8, $-1 - JNE LBB26_59 - LEAQ 0(BX)(AX*1), R8 - -LBB26_63: - ADDQ $1, AX - CMPQ SI, AX - JE LBB26_64 - -LBB26_51: - MOVBLSX 0(R15)(AX*1), CX - LEAL -48(CX), DX - 
CMPL DX, $10 - JB LBB26_63 - LEAL -43(CX), DX - CMPL DX, $26 - JA LBB26_53 - MOVLQSX 0(R14)(DX*4), CX - ADDQ R14, CX - JMP CX - -LBB26_61: - CMPQ R10, $-1 - JNE LBB26_59 - LEAQ 0(BX)(AX*1), R10 - JMP LBB26_63 - -LBB26_57: - CMPQ R9, $-1 - JNE LBB26_59 - LEAQ 0(BX)(AX*1), R9 - JMP LBB26_63 + JNE LBB25_79 + DECQ DX + MOVQ DX, R8 + JMP LBB25_44 -LBB26_1: +LBB25_53: MOVQ $-1, AX - JMP LBB26_80 + JMP LBB25_74 -LBB26_82: - ADDQ AX, R15 - ADDQ DI, R15 +LBB25_54: + ADDQ CX, R14 WORD $0xf8c5; BYTE $0x77 // vzeroupper - MOVQ $-1, AX - TESTQ R8, R8 - JNE LBB26_67 - JMP LBB26_80 + MOVQ R14, R10 + MOVQ $-1, DX + TESTQ AX, AX + JNE LBB25_57 + JMP LBB25_73 -LBB26_85: - ADDQ AX, CX - ADDQ CX, R15 - MOVQ $-1, AX - TESTQ R8, R8 - JNE LBB26_67 - JMP LBB26_80 +LBB25_55: + ADDQ CX, R10 -LBB26_65: - ADDQ AX, R15 +LBB25_56: + MOVQ $-1, DX + TESTQ AX, AX + JE LBB25_73 -LBB26_66: - MOVQ $-1, AX +LBB25_57: + TESTQ R9, R9 + JE LBB25_73 TESTQ R8, R8 - JE LBB26_80 - -LBB26_67: - TESTQ R10, R10 - JE LBB26_80 + JE LBB25_73 + SUBQ DI, R10 + LEAQ -1(R10), CX + CMPQ AX, CX + JE LBB25_65 + CMPQ R8, CX + JE LBB25_65 + CMPQ R9, CX + JE LBB25_65 TESTQ R9, R9 - JE LBB26_80 - SUBQ DI, R15 - LEAQ -1(R15), AX - CMPQ R8, AX - JE LBB26_72 - CMPQ R9, AX - JE LBB26_72 - CMPQ R10, AX - JE LBB26_72 - TESTQ R10, R10 - JLE LBB26_76 - LEAQ -1(R10), AX - CMPQ R8, AX - JE LBB26_76 - NOTQ R10 - MOVQ R10, AX - JMP LBB26_80 - -LBB26_72: - NEGQ R15 - MOVQ R15, AX - JMP LBB26_80 - -LBB26_76: - MOVQ R9, AX - ORQ R8, AX - CMPQ R9, R8 - JL LBB26_79 - TESTQ AX, AX - JS LBB26_79 + JLE LBB25_66 + LEAQ -1(R9), CX + CMPQ AX, CX + JE LBB25_66 NOTQ R9 + MOVQ R9, DX MOVQ R9, AX - JMP LBB26_80 + JMP LBB25_74 -LBB26_79: - LEAQ -1(R8), CX - TESTQ AX, AX +LBB25_65: + NEGQ R10 + MOVQ R10, DX + MOVQ R10, AX + JMP LBB25_74 + +LBB25_66: + MOVQ R8, CX + ORQ AX, CX + CMPQ R8, AX + JL LBB25_69 + TESTQ CX, CX + JS LBB25_69 NOTQ R8 - LONG $0xc7480f4d // cmovsq %r15, %r8 - CMPQ R9, CX - LONG $0xc7450f4d // cmovneq %r15, %r8 + MOVQ R8, DX MOVQ R8, AX - JMP LBB26_80 - -LBB26_64: - MOVQ R11, R15 - MOVQ $-1, AX - TESTQ R8, R8 - JNE LBB26_67 - JMP LBB26_80 - -LBB26_12: - BSFL CX, CX - JMP LBB26_13 - -LBB26_81: - MOVL CX, CX + JMP LBB25_74 -LBB26_13: - NOTQ AX - SUBQ CX, AX - JMP LBB26_80 - -LBB26_33: - BSFL DX, CX - JMP LBB26_34 - -LBB26_59: - SUBQ R15, DI - JMP LBB26_60 +LBB25_69: + TESTQ CX, CX + LEAQ -1(AX), CX + NOTQ AX + LONG $0xc2480f49 // cmovsq %r10, %rax + CMPQ R8, CX + LONG $0xc2450f49 // cmovneq %r10, %rax + JMP LBB25_74 -LBB26_84: - MOVL DX, CX - JMP LBB26_34 +LBB25_70: + SUBQ DI, R14 + BSFL BX, DX + ADDQ R14, DX + JMP LBB25_72 -LBB26_83: - MOVL BX, CX +LBB25_71: + SUBQ DI, R10 + BSFL BX, DX + ADDQ R10, DX -LBB26_34: - SUBQ R15, DI - SUBQ CX, DI +LBB25_72: + NOTQ DX -LBB26_60: - NOTQ AX - ADDQ DI, AX +LBB25_73: + MOVQ DX, AX -LBB26_80: +LBB25_74: BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -7838,46 +8088,91 @@ LBB26_80: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB26_7: - MOVQ $-1, R8 +LBB25_75: + MOVQ R11, R10 + MOVQ $-1, DX + TESTQ AX, AX + JNE LBB25_57 + JMP LBB25_73 + +LBB25_79: + NEGQ DX + JMP LBB25_73 + +LBB25_76: MOVQ $-1, R9 - MOVQ DI, R15 + MOVQ $-1, AX + MOVQ $-1, R8 + MOVQ DI, R10 CMPQ SI, $16 - JAE LBB26_29 - JMP LBB26_49 - -// .set L26_0_set_61, LBB26_61-LJTI26_0 -// .set L26_0_set_65, LBB26_65-LJTI26_0 -// .set L26_0_set_57, LBB26_57-LJTI26_0 -// .set L26_0_set_54, LBB26_54-LJTI26_0 -LJTI26_0: - LONG $0xfffffeaa // .long L26_0_set_61 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xfffffeaa // .long L26_0_set_61 
- LONG $0xfffffeba // .long L26_0_set_57 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xffffff07 // .long L26_0_set_65 - LONG $0xfffffe71 // .long L26_0_set_54 + JAE LBB25_24 + JMP LBB25_42 + +// .set L25_0_set_47, LBB25_47-LJTI25_0 +// .set L25_0_set_56, LBB25_56-LJTI25_0 +// .set L25_0_set_51, LBB25_51-LJTI25_0 +// .set L25_0_set_44, LBB25_44-LJTI25_0 +// .set L25_0_set_49, LBB25_49-LJTI25_0 +LJTI25_0: + LONG $0xfffffe98 // .long L25_0_set_47 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xfffffe98 // .long L25_0_set_47 + LONG $0xfffffec8 // .long L25_0_set_51 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xfffffe73 // .long L25_0_set_44 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xfffffeb0 // .long L25_0_set_49 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xffffff09 // 
.long L25_0_set_56 + LONG $0xffffff09 // .long L25_0_set_56 + LONG $0xfffffeb0 // .long L25_0_set_49 _skip_positive: BYTE $0x55 // pushq %rbp @@ -7886,27 +8181,26 @@ _skip_positive: BYTE $0x53 // pushq %rbx MOVQ SI, R14 MOVQ 0(SI), BX - ADDQ $-1, BX + DECQ BX MOVQ 0(DI), AX ADDQ BX, AX MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0xfffac7e8; BYTE $0xff // callq _do_skip_number + LONG $0xfffa20e8; BYTE $0xff // callq _do_skip_number TESTQ AX, AX - JS LBB27_1 + JS LBB26_1 MOVQ 0(R14), CX - ADDQ AX, CX - ADDQ $-1, CX - JMP LBB27_3 + LEAQ -1(AX)(CX*1), CX + JMP LBB26_3 -LBB27_1: +LBB26_1: MOVQ 0(R14), CX SUBQ AX, CX ADDQ $-2, CX MOVQ $-2, BX -LBB27_3: +LBB26_3: MOVQ CX, 0(R14) MOVQ BX, AX BYTE $0x5b // popq %rbx @@ -7931,22 +8225,22 @@ _skip_number: SETEQ AX ADDQ AX, BX SUBQ AX, SI - JE LBB28_6 + JE LBB27_6 CMPQ R15, SI - JAE LBB28_3 + JAE LBB27_3 MOVB 0(BX), AX ADDB $-48, AX CMPB AX, $9 - JA LBB28_8 + JA LBB27_8 -LBB28_3: +LBB27_3: MOVQ BX, DI - LONG $0xfffa57e8; BYTE $0xff // callq _do_skip_number + LONG $0xfff9b2e8; BYTE $0xff // callq _do_skip_number TESTQ AX, AX - JS LBB28_7 + JS LBB27_7 ADDQ AX, BX -LBB28_5: +LBB27_5: SUBQ R12, BX MOVQ BX, 0(R14) MOVQ R15, AX @@ -7957,17 +8251,17 @@ LBB28_5: BYTE $0x5d // popq %rbp RET -LBB28_6: +LBB27_6: MOVQ $-1, R15 - JMP LBB28_5 + JMP LBB27_5 -LBB28_7: +LBB27_7: NOTQ AX ADDQ AX, BX -LBB28_8: +LBB27_8: MOVQ $-2, R15 - JMP LBB28_5 + JMP LBB27_5 _validate_one: BYTE $0x55 // pushq %rbp @@ -7984,95 +8278,72 @@ _validate_one: _find_non_ascii: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + MOVQ DI, CX CMPQ SI, $32 - JL LBB30_1 - XORL AX, AX - XORL DX, DX + JL LBB29_5 -LBB30_3: - LONG $0x046ffec5; BYTE $0x07 // vmovdqu (%rdi,%rax), %ymm0 - LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx - TESTL CX, CX - JNE LBB30_4 - ADDQ $32, AX - LEAQ 0(SI)(DX*1), CX - ADDQ $-32, CX - ADDQ $-32, DX +LBB29_1: + LONG $0x016ffec5 // vmovdqu (%rcx), %ymm0 + LONG $0xc0d7fdc5 // vpmovmskb %ymm0, %eax + TESTL AX, AX + JNE LBB29_2 ADDQ $32, CX - CMPQ CX, $63 - JG LBB30_3 - MOVQ DI, CX - SUBQ DX, CX - SUBQ AX, SI - WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ SI, $16 - JGE LBB30_13 - -LBB30_8: - MOVQ SI, DX - JMP LBB30_9 + CMPQ SI, $63 + LEAQ -32(SI), SI + JG LBB29_1 -LBB30_1: - MOVQ DI, CX +LBB29_5: WORD $0xf8c5; BYTE $0x77 // vzeroupper CMPQ SI, $16 - JL LBB30_8 - -LBB30_13: - MOVQ DI, R8 - SUBQ CX, R8 + JL LBB29_6 -LBB30_14: +LBB29_11: LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax TESTW AX, AX - JNE LBB30_15 - LEAQ -16(SI), DX + JNE LBB29_12 ADDQ $16, CX - ADDQ $-16, R8 CMPQ SI, $31 - MOVQ DX, SI - JG LBB30_14 + LEAQ -16(SI), SI + JG LBB29_11 -LBB30_9: +LBB29_6: MOVQ $-1, AX - TESTQ DX, DX - JLE LBB30_18 - ADDQ $1, DX - MOVQ CX, SI - SUBQ DI, SI + TESTQ SI, SI + JLE LBB29_14 + INCQ SI -LBB30_11: +LBB29_8: CMPB 0(CX), $0 - JS LBB30_12 - ADDQ $1, CX - ADDQ $-1, DX - ADDQ $1, SI - CMPQ DX, $1 - JG LBB30_11 + JS LBB29_13 + INCQ CX + DECQ SI + CMPQ SI, $1 + JG LBB29_8 -LBB30_18: +LBB29_14: BYTE $0x5d // popq %rbp RET -LBB30_12: - MOVQ SI, AX +LBB29_13: + SUBQ DI, CX + MOVQ CX, AX BYTE $0x5d // popq %rbp RET -LBB30_4: - MOVLQSX CX, AX - BSFQ AX, AX - SUBQ DX, AX - BYTE $0x5d // popq %rbp - WORD $0xf8c5; BYTE $0x77 // vzeroupper - RET +LBB29_2: + WORD $0x9848 // cltq + JMP LBB29_3 -LBB30_15: +LBB29_12: MOVWLZX AX, AX - BSFQ AX, AX - SUBQ R8, AX - BYTE $0x5d // popq %rbp + +LBB29_3: + SUBQ DI, CX + BSFQ AX, AX + ADDQ CX, AX + BYTE $0x5d // popq %rbp + WORD $0xf8c5; BYTE $0x77 // vzeroupper RET _print_mantissa: @@ -8080,11 
+8351,11 @@ _print_mantissa: WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx - MOVL DX, R14 + MOVLQSX DX, R14 ADDQ SI, R14 MOVQ DI, AX SHRQ $32, AX - JE LBB31_2 + JE LBB30_2 MOVQ $-6067343680855748867, DX MOVQ DI, AX MULQ DX @@ -8116,7 +8387,7 @@ _print_mantissa: LONG $0x64fa6b41 // imull $100, %r10d, %edi SUBL DI, AX MOVWLZX AX, R11 - LONG $0x663d8d48; WORD $0x0059; BYTE $0x00 // leaq $22886(%rip), %rdi /* _Digits(%rip) */ + LONG $0x203d8d48; WORD $0x0059; BYTE $0x00 // leaq $22816(%rip), %rdi /* _Digits(%rip) */ MOVWLZX 0(DI)(R8*2), AX MOVW AX, -2(R14) MOVWLZX 0(DI)(R9*2), AX @@ -8128,13 +8399,13 @@ _print_mantissa: ADDQ $-8, R14 MOVQ DX, DI -LBB31_2: +LBB30_2: CMPL DI, $10000 - JB LBB31_3 + JB LBB30_3 MOVL $3518437209, R8 - LONG $0x1e0d8d4c; WORD $0x0059; BYTE $0x00 // leaq $22814(%rip), %r9 /* _Digits(%rip) */ + LONG $0xd80d8d4c; WORD $0x0058; BYTE $0x00 // leaq $22744(%rip), %r9 /* _Digits(%rip) */ -LBB31_5: +LBB30_5: MOVL DI, AX IMULQ R8, AX SHRQ $45, AX @@ -8151,11 +8422,11 @@ LBB31_5: ADDQ $-4, R14 CMPL DI, $99999999 MOVL AX, DI - JA LBB31_5 + JA LBB30_5 CMPL AX, $100 - JB LBB31_8 + JB LBB30_8 -LBB31_7: +LBB30_7: MOVWLZX AX, CX SHRL $2, CX LONG $0x147bc969; WORD $0x0000 // imull $5243, %ecx, %ecx @@ -8163,17 +8434,17 @@ LBB31_7: WORD $0xd16b; BYTE $0x64 // imull $100, %ecx, %edx SUBL DX, AX MOVWLZX AX, AX - LONG $0xb7158d48; WORD $0x0058; BYTE $0x00 // leaq $22711(%rip), %rdx /* _Digits(%rip) */ + LONG $0x71158d48; WORD $0x0058; BYTE $0x00 // leaq $22641(%rip), %rdx /* _Digits(%rip) */ MOVWLZX 0(DX)(AX*2), AX MOVW AX, -2(R14) ADDQ $-2, R14 MOVL CX, AX -LBB31_8: +LBB30_8: CMPL AX, $10 - JB LBB31_10 + JB LBB30_10 MOVL AX, AX - LONG $0x9a0d8d48; WORD $0x0058; BYTE $0x00 // leaq $22682(%rip), %rcx /* _Digits(%rip) */ + LONG $0x540d8d48; WORD $0x0058; BYTE $0x00 // leaq $22612(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVW AX, -2(R14) BYTE $0x5b // popq %rbx @@ -8181,13 +8452,13 @@ LBB31_8: BYTE $0x5d // popq %rbp RET -LBB31_3: +LBB30_3: MOVL DI, AX CMPL AX, $100 - JAE LBB31_7 - JMP LBB31_8 + JAE LBB30_7 + JMP LBB30_8 -LBB31_10: +LBB30_10: ADDB $48, AX MOVB AX, 0(SI) BYTE $0x5b // popq %rbx @@ -8201,51 +8472,44 @@ _left_shift: WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx MOVL SI, CX - IMUL3Q $104, CX, R11 - LONG $0xbe158d48; WORD $0x008b; BYTE $0x00 // leaq $35774(%rip), %rdx /* _LSHIFT_TAB(%rip) */ - MOVL 0(R11)(DX*1), R8 + IMUL3Q $104, CX, DX + LONG $0x78358d48; WORD $0x008b; BYTE $0x00 // leaq $35704(%rip), %rsi /* _LSHIFT_TAB(%rip) */ + MOVL 0(DX)(SI*1), R8 MOVQ 0(DI), R10 MOVLQSX 16(DI), R9 + MOVB 4(DX)(SI*1), AX TESTQ R9, R9 - JE LBB32_1 - LEAQ 0(R11)(DX*1), SI - ADDQ $4, SI - XORL BX, BX + JE LBB31_6 + LEAQ 5(DX)(SI*1), DX + XORL SI, SI -LBB32_4: - MOVBLZX 0(SI)(BX*1), AX +LBB31_3: TESTB AX, AX - JE LBB32_10 - CMPB 0(R10)(BX*1), AX - JNE LBB32_6 - ADDQ $1, BX - CMPQ R9, BX - JNE LBB32_4 - MOVL R9, SI - ADDQ R11, DX - CMPB 4(SI)(DX*1), $0 - JNE LBB32_9 - JMP LBB32_10 - -LBB32_1: - XORL SI, SI - ADDQ R11, DX - CMPB 4(SI)(DX*1), $0 - JE LBB32_10 + JE LBB31_8 + CMPB 0(R10)(SI*1), AX + JNE LBB31_5 + MOVBLZX 0(DX)(SI*1), AX + INCQ SI + CMPQ R9, SI + JNE LBB31_3 + +LBB31_6: + TESTB AX, AX + JE LBB31_8 -LBB32_9: - ADDL $-1, R8 +LBB31_7: + DECL R8 -LBB32_10: +LBB31_8: TESTL R9, R9 - JLE LBB32_25 + JLE LBB31_23 LEAL 0(R8)(R9*1), AX MOVLQSX AX, R14 - ADDQ $-1, R14 + DECQ R14 XORL DX, DX MOVQ $-3689348814741910323, R11 -LBB32_12: +LBB31_10: MOVBQSX -1(R10)(R9*1), SI ADDQ $-48, SI SHLQ CX, SI @@ -8258,91 +8522,83 @@ 
LBB32_12: MOVQ SI, AX SUBQ BX, AX CMPQ 8(DI), R14 - JBE LBB32_18 + JBE LBB31_16 ADDB $48, AX MOVB AX, 0(R10)(R14*1) - JMP LBB32_20 + JMP LBB31_18 -LBB32_18: +LBB31_16: TESTQ AX, AX - JE LBB32_20 + JE LBB31_18 MOVL $1, 28(DI) -LBB32_20: +LBB31_18: CMPQ R9, $2 - JL LBB32_14 - ADDQ $-1, R9 + JL LBB31_12 + DECQ R9 MOVQ 0(DI), R10 - ADDQ $-1, R14 - JMP LBB32_12 + DECQ R14 + JMP LBB31_10 -LBB32_14: +LBB31_12: CMPQ SI, $10 - JAE LBB32_15 + JAE LBB31_13 -LBB32_25: +LBB31_23: MOVLQSX 16(DI), CX MOVLQSX R8, AX ADDQ CX, AX MOVL AX, 16(DI) MOVQ 8(DI), CX CMPQ CX, AX - JA LBB32_27 + JA LBB31_25 MOVL CX, 16(DI) MOVL CX, AX -LBB32_27: +LBB31_25: ADDL R8, 20(DI) TESTL AX, AX - JLE LBB32_31 + JLE LBB31_29 MOVQ 0(DI), CX - MOVL AX, DX - ADDQ $1, DX - ADDL $-1, AX + MOVL AX, AX -LBB32_29: - MOVL AX, SI - CMPB 0(CX)(SI*1), $48 - JNE LBB32_33 - MOVL AX, 16(DI) - ADDQ $-1, DX - ADDL $-1, AX +LBB31_27: + CMPB -1(CX)(AX*1), $48 + JNE LBB31_31 + MOVL AX, DX + DECQ AX + DECL DX + MOVL DX, 16(DI) + LEAQ 1(AX), DX CMPQ DX, $1 - JG LBB32_29 - JMP LBB32_32 + JG LBB31_27 -LBB32_31: - JE LBB32_32 +LBB31_29: + TESTL AX, AX + JE LBB31_30 -LBB32_33: +LBB31_31: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB32_32: - MOVL $0, 20(DI) - BYTE $0x5b // popq %rbx - WORD $0x5e41 // popq %r14 - BYTE $0x5d // popq %rbp - RET - -LBB32_15: +LBB31_13: MOVLQSX R14, SI - ADDQ $-1, SI - JMP LBB32_16 + DECQ SI + JMP LBB31_14 -LBB32_17: +LBB31_15: ADDB $48, AX MOVQ 0(DI), BX MOVB AX, 0(BX)(SI*1) -LBB32_24: - ADDQ $-1, SI +LBB31_22: + DECQ SI CMPQ CX, $9 - JBE LBB32_25 + JBE LBB31_23 -LBB32_16: +LBB31_14: MOVQ DX, CX MOVQ DX, AX MULQ R11 @@ -8352,157 +8608,153 @@ LBB32_16: MOVQ CX, AX SUBQ BX, AX CMPQ 8(DI), SI - JA LBB32_17 + JA LBB31_15 TESTQ AX, AX - JE LBB32_24 + JE LBB31_22 MOVL $1, 28(DI) - JMP LBB32_24 + JMP LBB31_22 -LBB32_6: - JL LBB32_9 - JMP LBB32_10 +LBB31_30: + MOVL $0, 20(DI) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 + BYTE $0x5d // popq %rbp + RET + +LBB31_5: + JL LBB31_7 + JMP LBB31_8 _right_shift: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - BYTE $0x53 // pushq %rbx - MOVL SI, CX - MOVL 16(DI), R8 - XORL DX, DX - TESTL R8, R8 - MOVL $0, R11 - LONG $0xd8490f45 // cmovnsl %r8d, %r11d - XORL AX, AX + BYTE $0x55 // pushq %rbp + WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + MOVL SI, CX + MOVLQSX 16(DI), R9 + XORL SI, SI + XORL AX, AX -LBB33_1: - CMPQ R11, DX - JE LBB33_2 +LBB32_1: + CMPQ SI, R9 + JGE LBB32_2 LEAQ 0(AX)(AX*4), AX - MOVQ 0(DI), SI - MOVBQSX 0(SI)(DX*1), SI - LEAQ 0(SI)(AX*2), AX - ADDQ $-48, AX - ADDQ $1, DX - MOVQ AX, SI - SHRQ CX, SI - TESTQ SI, SI - JE LBB33_1 - MOVL DX, R11 + MOVQ 0(DI), DX + MOVBQSX 0(DX)(SI*1), DX + LEAQ -48(DX)(AX*2), AX + INCQ SI + MOVQ AX, DX + SHRQ CX, DX + TESTQ DX, DX + JE LBB32_1 -LBB33_7: +LBB32_6: MOVL 20(DI), DX - SUBL R11, DX - ADDL $1, DX - MOVQ $-1, R9 - SHLQ CX, R9 + SUBL SI, DX + INCL DX + MOVQ $-1, R8 + SHLQ CX, R8 MOVL DX, 20(DI) - NOTQ R9 + NOTQ R8 XORL R10, R10 - CMPL R11, R8 - JGE LBB33_10 - MOVLQSX R11, R8 + CMPL SI, R9 + JGE LBB32_9 + MOVLQSX SI, R9 MOVQ 0(DI), SI XORL R10, R10 -LBB33_9: +LBB32_8: MOVQ AX, DX SHRQ CX, DX - ANDQ R9, AX + ANDQ R8, AX ADDB $48, DX MOVB DX, 0(SI)(R10*1) - MOVQ 0(DI), SI - LEAQ 0(SI)(R8*1), DX - MOVBQSX 0(R10)(DX*1), R11 - LEAQ 1(R8)(R10*1), BX - ADDQ $1, R10 LEAQ 0(AX)(AX*4), AX - LEAQ 0(R11)(AX*2), AX - ADDQ $-48, AX - MOVLQSX 16(DI), DX - CMPQ BX, DX - JL LBB33_9 - JMP LBB33_10 - -LBB33_12: + MOVQ 0(DI), SI + LEAQ 0(SI)(R9*1), DX + MOVBQSX 0(R10)(DX*1), DX + LEAQ 
-48(DX)(AX*2), AX + MOVLQSX 16(DI), R11 + LEAQ 1(R9)(R10*1), DX + INCQ R10 + CMPQ DX, R11 + JL LBB32_8 + JMP LBB32_9 + +LBB32_11: ADDB $48, SI - MOVQ 0(DI), BX - MOVB SI, 0(BX)(DX*1) - ADDL $1, DX - MOVL DX, R10 + MOVQ 0(DI), DX + MOVB SI, 0(DX)(R9*1) + INCL R9 + MOVL R9, R10 -LBB33_15: +LBB32_14: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX -LBB33_10: +LBB32_9: TESTQ AX, AX - JE LBB33_16 + JE LBB32_15 MOVQ AX, SI SHRQ CX, SI - ANDQ R9, AX - MOVLQSX R10, DX - CMPQ 8(DI), DX - JA LBB33_12 + ANDQ R8, AX + MOVLQSX R10, R9 + CMPQ 8(DI), R9 + JA LBB32_11 TESTQ SI, SI - JE LBB33_15 + JE LBB32_14 MOVL $1, 28(DI) - JMP LBB33_15 + JMP LBB32_14 -LBB33_16: +LBB32_15: MOVL R10, 16(DI) TESTL R10, R10 - JLE LBB33_20 + JLE LBB32_19 MOVQ 0(DI), AX - MOVL R10, CX - ADDQ $1, CX - ADDL $-1, R10 - -LBB33_18: - MOVL R10, DX - CMPB 0(AX)(DX*1), $48 - JNE LBB33_22 - MOVL R10, 16(DI) - ADDQ $-1, CX - ADDL $-1, R10 + MOVL R10, R10 + +LBB32_17: + CMPB -1(AX)(R10*1), $48 + JNE LBB32_21 + MOVL R10, CX + DECQ R10 + DECL CX + MOVL CX, 16(DI) + LEAQ 1(R10), CX CMPQ CX, $1 - JG LBB33_18 - JMP LBB33_21 + JG LBB32_17 + +LBB32_19: + TESTL R10, R10 + JE LBB32_20 + +LBB32_21: + BYTE $0x5d // popq %rbp + RET -LBB33_2: +LBB32_2: TESTQ AX, AX - JE LBB33_23 + JE LBB32_22 MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX - JNE LBB33_7 + JNE LBB32_6 -LBB33_4: +LBB32_4: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX - ADDL $1, R11 + INCL SI MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX - JE LBB33_4 - JMP LBB33_7 - -LBB33_20: - JE LBB33_21 - -LBB33_22: - BYTE $0x5b // popq %rbx - BYTE $0x5d // popq %rbp - RET + JE LBB32_4 + JMP LBB32_6 -LBB33_21: +LBB32_20: MOVL $0, 20(DI) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB33_23: +LBB32_22: MOVL $0, 16(DI) - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -12658,29 +12910,29 @@ _LSHIFT_TAB: LONG $0x00000000 // .asciz 4, '\x00\x00\x00\x00' _P10_TAB: - QUAD $0x3ff0000000000000 // .quad 0x3ff0000000000000 - QUAD $0x4024000000000000 // .quad 0x4024000000000000 - QUAD $0x4059000000000000 // .quad 0x4059000000000000 - QUAD $0x408f400000000000 // .quad 0x408f400000000000 - QUAD $0x40c3880000000000 // .quad 0x40c3880000000000 - QUAD $0x40f86a0000000000 // .quad 0x40f86a0000000000 - QUAD $0x412e848000000000 // .quad 0x412e848000000000 - QUAD $0x416312d000000000 // .quad 0x416312d000000000 - QUAD $0x4197d78400000000 // .quad 0x4197d78400000000 - QUAD $0x41cdcd6500000000 // .quad 0x41cdcd6500000000 - QUAD $0x4202a05f20000000 // .quad 0x4202a05f20000000 - QUAD $0x42374876e8000000 // .quad 0x42374876e8000000 - QUAD $0x426d1a94a2000000 // .quad 0x426d1a94a2000000 - QUAD $0x42a2309ce5400000 // .quad 0x42a2309ce5400000 - QUAD $0x42d6bcc41e900000 // .quad 0x42d6bcc41e900000 - QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 - QUAD $0x4341c37937e08000 // .quad 0x4341c37937e08000 - QUAD $0x4376345785d8a000 // .quad 0x4376345785d8a000 - QUAD $0x43abc16d674ec800 // .quad 0x43abc16d674ec800 - QUAD $0x43e158e460913d00 // .quad 0x43e158e460913d00 - QUAD $0x4415af1d78b58c40 // .quad 0x4415af1d78b58c40 - QUAD $0x444b1ae4d6e2ef50 // .quad 0x444b1ae4d6e2ef50 - QUAD $0x4480f0cf064dd592 // .quad 0x4480f0cf064dd592 + QUAD $0x3ff0000000000000 // .quad 4607182418800017408 + QUAD $0x4024000000000000 // .quad 4621819117588971520 + QUAD $0x4059000000000000 // .quad 4636737291354636288 + QUAD $0x408f400000000000 // .quad 4652007308841189376 + QUAD $0x40c3880000000000 // .quad 4666723172467343360 + QUAD $0x40f86a0000000000 // .quad 4681608360884174848 + QUAD $0x412e848000000000 // .quad 4696837146684686336 + QUAD $0x416312d000000000 // .quad 
4711630319722168320 + QUAD $0x4197d78400000000 // .quad 4726483295884279808 + QUAD $0x41cdcd6500000000 // .quad 4741671816366391296 + QUAD $0x4202a05f20000000 // .quad 4756540486875873280 + QUAD $0x42374876e8000000 // .quad 4771362005757984768 + QUAD $0x426d1a94a2000000 // .quad 4786511204640096256 + QUAD $0x42a2309ce5400000 // .quad 4801453603149578240 + QUAD $0x42d6bcc41e900000 // .quad 4816244402031689728 + QUAD $0x430c6bf526340000 // .quad 4831355200913801216 + QUAD $0x4341c37937e08000 // .quad 4846369599423283200 + QUAD $0x4376345785d8a000 // .quad 4861130398305394688 + QUAD $0x43abc16d674ec800 // .quad 4876203697187506176 + QUAD $0x43e158e460913d00 // .quad 4891288408196988160 + QUAD $0x4415af1d78b58c40 // .quad 4906019910204099648 + QUAD $0x444b1ae4d6e2ef50 // .quad 4921056587992461136 + QUAD $0x4480f0cf064dd592 // .quad 4936209963552724370 _first: QUAD $0xf0f0f0f0f0f0f0f0; QUAD $0xf0f0f0f0f0f0f0f0 // .ascii 16, '\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0' @@ -12724,7 +12976,7 @@ _entry: _f64toa: MOVQ out+0(FP), DI MOVSD val+8(FP), X0 - CALL ·__native_entry__+825(SB) // _f64toa + CALL ·__native_entry__+903(SB) // _f64toa MOVQ AX, ret+16(FP) RET @@ -12746,7 +12998,7 @@ _html_escape: MOVQ nb+8(FP), SI MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX - CALL ·__native_entry__+10493(SB) // _html_escape + CALL ·__native_entry__+10249(SB) // _html_escape MOVQ AX, ret+32(FP) RET @@ -12766,7 +13018,7 @@ _entry: _i64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+3908(SB) // _i64toa + CALL ·__native_entry__+3915(SB) // _i64toa MOVQ AX, ret+16(FP) RET @@ -12787,7 +13039,7 @@ _lspace: MOVQ sp+0(FP), DI MOVQ nb+8(FP), SI MOVQ off+16(FP), DX - CALL ·__native_entry__+379(SB) // _lspace + CALL ·__native_entry__+429(SB) // _lspace MOVQ AX, ret+24(FP) RET @@ -12820,7 +13072,7 @@ TEXT ·__quote(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -72(SP), R12 + LEAQ -56(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12830,7 +13082,7 @@ _quote: MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+5325(SB) // _quote + CALL ·__native_entry__+5328(SB) // _quote MOVQ AX, ret+40(FP) RET @@ -12843,7 +13095,7 @@ TEXT ·__skip_array(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -136(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12851,7 +13103,7 @@ _skip_array: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+21311(SB) // _skip_array + CALL ·__native_entry__+22298(SB) // _skip_array MOVQ AX, ret+24(FP) RET @@ -12871,7 +13123,7 @@ _entry: _skip_number: MOVQ s+0(FP), DI MOVQ p+8(FP), SI - CALL ·__native_entry__+24831(SB) // _skip_number + CALL ·__native_entry__+25946(SB) // _skip_number MOVQ AX, ret+16(FP) RET @@ -12884,7 +13136,7 @@ TEXT ·__skip_object(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -136(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12892,7 +13144,7 @@ _skip_object: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+21348(SB) // _skip_object + CALL ·__native_entry__+22335(SB) // _skip_object MOVQ AX, ret+24(FP) RET @@ -12905,7 +13157,7 @@ TEXT ·__skip_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -136(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12913,7 +13165,7 @@ _skip_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+19468(SB) // _skip_one + CALL ·__native_entry__+19172(SB) // _skip_one MOVQ AX, ret+24(FP) RET @@ 
-12933,7 +13185,7 @@ _entry: _u64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+4003(SB) // _u64toa + CALL ·__native_entry__+4008(SB) // _u64toa MOVQ AX, ret+16(FP) RET @@ -12956,7 +13208,7 @@ _unquote: MOVQ dp+16(FP), DX MOVQ ep+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+7998(SB) // _unquote + CALL ·__native_entry__+7794(SB) // _unquote MOVQ AX, ret+40(FP) RET @@ -12969,7 +13221,7 @@ TEXT ·__validate_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -136(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12977,7 +13229,7 @@ _validate_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+24948(SB) // _validate_one + CALL ·__native_entry__+26063(SB) // _validate_one MOVQ AX, ret+24(FP) RET @@ -12990,7 +13242,7 @@ TEXT ·__value(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -424(SP), R12 + LEAQ -408(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -13000,7 +13252,7 @@ _value: MOVQ p+16(FP), DX MOVQ v+24(FP), CX MOVQ allow_control+32(FP), R8 - CALL ·__native_entry__+14390(SB) // _value + CALL ·__native_entry__+14495(SB) // _value MOVQ AX, ret+40(FP) RET @@ -13013,7 +13265,7 @@ TEXT ·__vnumber(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -320(SP), R12 + LEAQ -312(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -13021,7 +13273,7 @@ _vnumber: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+17482(SB), AX // _vnumber + LEAQ ·__native_entry__+17330(SB), AX // _vnumber JMP AX _stack_grow: @@ -13041,7 +13293,7 @@ _vsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+18912(SB), AX // _vsigned + LEAQ ·__native_entry__+18644(SB), AX // _vsigned JMP AX _stack_grow: @@ -13061,7 +13313,7 @@ _vstring: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+16575(SB), AX // _vstring + LEAQ ·__native_entry__+16453(SB), AX // _vstring JMP AX _stack_grow: @@ -13073,7 +13325,7 @@ TEXT ·__vunsigned(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -16(SP), R12 + LEAQ -8(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -13081,7 +13333,7 @@ _vunsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+19192(SB), AX // _vunsigned + LEAQ ·__native_entry__+18903(SB), AX // _vunsigned JMP AX _stack_grow: diff --git a/internal/native/avx2/native_subr_amd64.go b/internal/native/avx2/native_subr_amd64.go index e0cd1b393..98da9c18a 100644 --- a/internal/native/avx2/native_subr_amd64.go +++ b/internal/native/avx2/native_subr_amd64.go @@ -9,24 +9,24 @@ package avx2 func __native_entry__() uintptr var ( - _subr__f64toa = __native_entry__() + 825 - _subr__html_escape = __native_entry__() + 10493 - _subr__i64toa = __native_entry__() + 3908 - _subr__lspace = __native_entry__() + 379 + _subr__f64toa = __native_entry__() + 903 + _subr__html_escape = __native_entry__() + 10249 + _subr__i64toa = __native_entry__() + 3915 + _subr__lspace = __native_entry__() + 429 _subr__lzero = __native_entry__() + 13 - _subr__quote = __native_entry__() + 5325 - _subr__skip_array = __native_entry__() + 21311 - _subr__skip_number = __native_entry__() + 24831 - _subr__skip_object = __native_entry__() + 21348 - _subr__skip_one = __native_entry__() + 19468 - _subr__u64toa = __native_entry__() + 4003 - _subr__unquote = __native_entry__() + 7998 - _subr__validate_one = __native_entry__() + 24948 - _subr__value = __native_entry__() + 14390 - _subr__vnumber = __native_entry__() + 17482 - _subr__vsigned = 
-    _subr__vstring      = __native_entry__() + 16575
-    _subr__vunsigned    = __native_entry__() + 19192
+    _subr__quote        = __native_entry__() + 5328
+    _subr__skip_array   = __native_entry__() + 22298
+    _subr__skip_number  = __native_entry__() + 25946
+    _subr__skip_object  = __native_entry__() + 22335
+    _subr__skip_one     = __native_entry__() + 19172
+    _subr__u64toa       = __native_entry__() + 4008
+    _subr__unquote      = __native_entry__() + 7794
+    _subr__validate_one = __native_entry__() + 26063
+    _subr__value        = __native_entry__() + 14495
+    _subr__vnumber      = __native_entry__() + 17330
+    _subr__vsigned      = __native_entry__() + 18644
+    _subr__vstring      = __native_entry__() + 16453
+    _subr__vunsigned    = __native_entry__() + 18903
 )
 
 const (
@@ -35,19 +35,19 @@ const (
     _stack__i64toa       = 24
     _stack__lspace       = 8
     _stack__lzero        = 8
-    _stack__quote        = 72
-    _stack__skip_array   = 160
+    _stack__quote        = 56
+    _stack__skip_array   = 136
     _stack__skip_number  = 96
-    _stack__skip_object  = 160
-    _stack__skip_one     = 160
+    _stack__skip_object  = 136
+    _stack__skip_one     = 136
     _stack__u64toa       = 8
     _stack__unquote      = 72
-    _stack__validate_one = 160
-    _stack__value        = 424
-    _stack__vnumber      = 320
+    _stack__validate_one = 136
+    _stack__value        = 408
+    _stack__vnumber      = 312
     _stack__vsigned      = 16
     _stack__vstring      = 112
-    _stack__vunsigned    = 16
+    _stack__vunsigned    = 8
 )
 
 var (
diff --git a/issue_test/issue263_test.go b/issue_test/issue263_test.go
new file mode 100644
index 000000000..a27a84764
--- /dev/null
+++ b/issue_test/issue263_test.go
@@ -0,0 +1,91 @@
+package issue_test
+
+import (
+    "bytes"
+    "testing"
+
+    "github.com/bytedance/sonic/ast"
+    "github.com/bytedance/sonic/decoder"
+    "github.com/davecgh/go-spew/spew"
+)
+
+type Response struct {
+    Menu Menu `json:"menu"`
+}
+
+type Menu struct {
+    Items []*Item `json:"items"`
+}
+
+type Item struct {
+    ID string `json:"id"`
+}
+
+func (i *Item) UnmarshalJSON(buf []byte) error {
+    return nil
+}
+
+func TestName(t *testing.T) {
+    q := `{
+  "menu": {
+    "items": [{
+      "id": "carrotcake",
+      "name": {
+        "en": "CarrotCake Name"
+      },
+      "operational_name": "carrotCake-op",
+      "description": null,
+      "plu": "carrotCake45",
+      "ian": "carrotCake_ian_45",
+      "external_data": "",
+      "image": {
+        "url": "http://127.0.0.1:50207/image7.jpg"
+      },
+      "tax_rate": "20",
+      "modifier_ids": [
+        "add-hot-drinks-mod"
+      ],
+      "contains_alcohol": false,
+      "max_quantity": null,
+      "is_eligible_for_substitution": true,
+      "is_eligible_as_replacement": true
+    },
+    {
+      "id": "cheeseburger",
+      "name": {
+        "en": "Cheeseburger Name"
+      },
+      "operational_name": "cheeseburger-op",
+      "description": null,
+      "plu": "cheeseburger40",
+      "ian": "cheeseburger_ian_40",
+      "external_data": "",
+      "image": {
+        "url": "http://127.0.0.1:50207/image1.jpg"
+      },
+      "tax_rate": "20",
+      "modifier_ids": [
+        "add-drinks-mod"
+      ],
+      "contains_alcohol": false,
+      "max_quantity": null,
+      "is_eligible_for_substitution": true,
+      "is_eligible_as_replacement": true
+    }
+    ]
+  }
+}`
+
+    n, err := ast.NewSearcher(q).GetByPath("menu", "items", 1)
+    if err != nil {
+        t.Fatal(err)
+    }
+    spew.Dump(n.Interface())
+
+    var response Response
+    err = decoder.NewStreamDecoder(bytes.NewReader([]byte(q))).Decode(&response)
+    if err != nil {
+        t.Fatal(err)
+    }
+
+}
\ No newline at end of file
diff --git a/native/scanning.c b/native/scanning.c
index 90a1e99f3..792dbaf42 100644
--- a/native/scanning.c
+++ b/native/scanning.c
@@ -972,10 +972,12 @@ void vunsigned(const GoString *src, long *p, JsonState *ret) {
 #define FSM_ARR_0   5
 #define FSM_OBJ_0   6
 
+
+#define ERR_HANDLE(v) (*p >= src->len ? -ERR_EOF : -v)
 #define FSM_DROP(v)    (v)->sp--
 #define FSM_REPL(v, t) (v)->vt[(v)->sp - 1] = (t)
 
-#define FSM_CHAR(c)    do { if (ch != (c)) return -ERR_INVAL; } while (0)
+#define FSM_CHAR(c)    do { if (ch != (c)) return ERR_HANDLE(ERR_INVAL); } while (0)
 #define FSM_XERR(v)    do { long r = (v); if (r < 0) return r; } while (0)
 
 static inline void fsm_init(StateMachine *self, int vt) {
@@ -1022,7 +1024,7 @@ static inline long fsm_exec(StateMachine *self, const GoString *src, long *p, in
             switch (ch) {
                 case ']' : FSM_DROP(self);                    continue;
                 case ',' : FSM_XERR(fsm_push(self, FSM_VAL)); continue;
-                default  : return -ERR_INVAL;
+                default  : return ERR_HANDLE(ERR_INVAL);
             }
         }
 
@@ -1031,7 +1033,7 @@ static inline long fsm_exec(StateMachine *self, const GoString *src, long *p, in
             switch (ch) {
                 case '}' : FSM_DROP(self);                    continue;
                 case ',' : FSM_XERR(fsm_push(self, FSM_KEY)); continue;
-                default  : return -ERR_INVAL;
+                default  : return ERR_HANDLE(ERR_INVAL);
             }
         }
 
@@ -1065,7 +1067,7 @@ static inline long fsm_exec(StateMachine *self, const GoString *src, long *p, in
             case FSM_OBJ_0: {
                 switch (ch) {
                     default: {
-                        return -ERR_INVAL;
+                        return ERR_HANDLE(ERR_INVAL);
                     }
 
                     /* empty object */
@@ -1116,7 +1118,7 @@ static inline long fsm_exec(StateMachine *self, const GoString *src, long *p, in
                 break;
             }
             case 0   : return -ERR_EOF;
-            default  : return -ERR_INVAL;
+            default  : return ERR_HANDLE(ERR_INVAL);
         }
     }