From 410625ca9eb1a81586ff188be7c2c64f525c5e31 Mon Sep 17 00:00:00 2001
From: liu
Date: Wed, 8 Jun 2022 21:50:10 +0800
Subject: [PATCH] opt: remove bounds checks and use loop unrolling in quote
 (begin to use clang13) (#237)

* opt: remove most bounds checks in quote

* opt: remove the branch when copying escaped chars

* opt: use a cache-friendly bool array

* opt: use loop unrolling in quote (a C sketch of these quote changes
  follows the diff)

* feat: add `-t` for bench.py

Change-Id: I3f41218187597126ef960eab09c6fa6fe252f347

* test(bench): adjust repeat_time

* test: add Diff on CI

* test: update bench.py

* doc: update README.md

Co-authored-by: liuqiang
Co-authored-by: duanyi.aster
---
 .github/workflows/benchmark-linux-amd64.yml |     5 +-
 README.md                                   |   108 +-
 ast/encode_test.go                          |     6 +-
 bench.py                                    |    29 +-
 bench.sh                                    |     4 +-
 internal/native/avx/native_amd64.s          |  9296 +++++++-------
 internal/native/avx/native_subr_amd64.go    |    54 +-
 internal/native/avx2/native_amd64.s         | 11750 +++++++++---------
 internal/native/avx2/native_subr_amd64.go   |    52 +-
 native/parsing.c                            |   161 +-
 10 files changed, 10921 insertions(+), 10544 deletions(-)

diff --git a/.github/workflows/benchmark-linux-amd64.yml b/.github/workflows/benchmark-linux-amd64.yml
index 199fa3a1f..a03d7622e 100644
--- a/.github/workflows/benchmark-linux-amd64.yml
+++ b/.github/workflows/benchmark-linux-amd64.yml
@@ -4,7 +4,7 @@ on: pull_request
 
 jobs:
   build:
-    runs-on: self-hosted
+    runs-on: [self-hosted, X64]
 
     steps:
     - uses: actions/checkout@v2
 
@@ -25,3 +25,6 @@ jobs:
 
     - name: Benchmark
       run: sh bench.sh
+
+    # - name: Diff
+    #   run: ./bench.py -b '"^Benchmark.*Sonic"' -c
diff --git a/README.md b/README.md
index 813213c98..c4847bc09 100644
--- a/README.md
+++ b/README.md
@@ -20,60 +20,60 @@ goversion: 1.17.1
 goos: darwin
 goarch: amd64
 cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
-BenchmarkEncoder_Generic_Sonic-16                     42688 ns/op    305.36 MB/s   15608 B/op     4 allocs/op
-BenchmarkEncoder_Generic_Sonic_Fast-16                30043 ns/op    433.87 MB/s   14638 B/op     4 allocs/op
-BenchmarkEncoder_Generic_JsonIter-16                  46461 ns/op    280.56 MB/s   13433 B/op    77 allocs/op
-BenchmarkEncoder_Generic_GoJson-16                    73608 ns/op    177.09 MB/s   23219 B/op    16 allocs/op
-BenchmarkEncoder_Generic_StdLib-16                   122622 ns/op    106.30 MB/s   49137 B/op   827 allocs/op
-BenchmarkEncoder_Binding_Sonic-16                      8190 ns/op   1591.61 MB/s   16175 B/op     4 allocs/op
-BenchmarkEncoder_Binding_Sonic_Fast-16                 7365 ns/op   1769.85 MB/s   14367 B/op     4 allocs/op
-BenchmarkEncoder_Binding_JsonIter-16                  23326 ns/op    558.81 MB/s    9487 B/op     2 allocs/op
-BenchmarkEncoder_Binding_GoJson-16                     9412 ns/op   1384.93 MB/s    9480 B/op     1 allocs/op
-BenchmarkEncoder_Binding_StdLib-16                    18510 ns/op    704.22 MB/s    9479 B/op     1 allocs/op
-BenchmarkEncoder_Parallel_Generic_Sonic-16             7716 ns/op   1689.37 MB/s   12812 B/op     4 allocs/op
-BenchmarkEncoder_Parallel_Generic_Sonic_Fast-16        4791 ns/op   2720.47 MB/s   10884 B/op     4 allocs/op
-BenchmarkEncoder_Parallel_Generic_JsonIter-16         10505 ns/op   1240.85 MB/s   13455 B/op    77 allocs/op
-BenchmarkEncoder_Parallel_Generic_GoJson-16           24086 ns/op    541.19 MB/s   23379 B/op    17 allocs/op
-BenchmarkEncoder_Parallel_Generic_StdLib-16           65697 ns/op    198.41 MB/s   49164 B/op   827 allocs/op
-BenchmarkEncoder_Parallel_Binding_Sonic-16             2085 ns/op   6251.53 MB/s   12933 B/op     4 allocs/op
-BenchmarkEncoder_Parallel_Binding_Sonic_Fast-16        1612 ns/op   8087.31 MB/s   11177 B/op     4 allocs/op
-BenchmarkEncoder_Parallel_Binding_JsonIter-16          6169 ns/op   2112.84 MB/s    9494 B/op     2 allocs/op
-BenchmarkEncoder_Parallel_Binding_GoJson-16            3492 ns/op   3733.14 MB/s    9492 B/op     1 allocs/op
-BenchmarkEncoder_Parallel_Binding_StdLib-16            5170 ns/op   2521.50 MB/s    9482 B/op     1 allocs/op
-
-BenchmarkDecoder_Generic_Sonic-16 71589 ns/op 182.08 MB/s 57531 B/op 723 allocs/op -BenchmarkDecoder_Generic_Sonic_Fast-16 57653 ns/op 226.10 MB/s 49743 B/op 313 allocs/op -BenchmarkDecoder_Generic_StdLib-16 143584 ns/op 90.78 MB/s 50870 B/op 772 allocs/op -BenchmarkDecoder_Generic_JsonIter-16 94775 ns/op 137.54 MB/s 55783 B/op 1068 allocs/op -BenchmarkDecoder_Generic_GoJson-16 88647 ns/op 147.04 MB/s 66371 B/op 973 allocs/op -BenchmarkDecoder_Binding_Sonic-16 32399 ns/op 402.33 MB/s 27814 B/op 137 allocs/op -BenchmarkDecoder_Binding_Sonic_Fast-16 28655 ns/op 454.89 MB/s 25127 B/op 34 allocs/op -BenchmarkDecoder_Binding_StdLib-16 116617 ns/op 111.78 MB/s 7344 B/op 103 allocs/op -BenchmarkDecoder_Binding_JsonIter-16 36206 ns/op 360.02 MB/s 14673 B/op 385 allocs/op -BenchmarkDecoder_Binding_GoJson-16 29396 ns/op 443.43 MB/s 22042 B/op 49 allocs/op -BenchmarkDecoder_Parallel_Generic_Sonic-16 12243 ns/op 1064.68 MB/s 57135 B/op 723 allocs/op -BenchmarkDecoder_Parallel_Generic_Sonic_Fast-16 10101 ns/op 1290.48 MB/s 49440 B/op 313 allocs/op -BenchmarkDecoder_Parallel_Generic_StdLib-16 57352 ns/op 227.28 MB/s 50877 B/op 772 allocs/op -BenchmarkDecoder_Parallel_Generic_JsonIter-16 58693 ns/op 222.09 MB/s 55814 B/op 1068 allocs/op -BenchmarkDecoder_Parallel_Generic_GoJson-16 45245 ns/op 288.10 MB/s 66430 B/op 974 allocs/op -BenchmarkDecoder_Parallel_Binding_Sonic-16 7035 ns/op 1852.89 MB/s 27731 B/op 137 allocs/op -BenchmarkDecoder_Parallel_Binding_Sonic_Fast-16 6510 ns/op 2002.33 MB/s 24841 B/op 34 allocs/op -BenchmarkDecoder_Parallel_Binding_StdLib-16 33086 ns/op 393.97 MB/s 7344 B/op 103 allocs/op -BenchmarkDecoder_Parallel_Binding_JsonIter-16 17827 ns/op 731.18 MB/s 14680 B/op 385 allocs/op -BenchmarkDecoder_Parallel_Binding_GoJson-16 16813 ns/op 775.29 MB/s 22268 B/op 49 allocs/op - -BenchmarkGetOne_Sonic-16 11328 ns/op 1149.64 MB/s 29 B/op 1 allocs/op -BenchmarkGetOne_Gjson-16 12970 ns/op 1004.07 MB/s 0 B/op 0 allocs/op -BenchmarkGetOne_Jsoniter-16 59928 ns/op 217.31 MB/s 27936 B/op 647 allocs/op -BenchmarkGetOne_Parallel_Sonic-16 1447 ns/op 9002.23 MB/s 114 B/op 1 allocs/op -BenchmarkGetOne_Parallel_Gjson-16 1171 ns/op 11125.73 MB/s 0 B/op 0 allocs/op -BenchmarkGetOne_Parallel_Jsoniter-16 15545 ns/op 837.75 MB/s 27940 B/op 647 allocs/op -BenchmarkSetOne_Sonic-16 16922 ns/op 769.57 MB/s 1936 B/op 17 allocs/op -BenchmarkSetOne_Sjson-16 42683 ns/op 305.11 MB/s 52181 B/op 9 allocs/op -BenchmarkSetOne_Jsoniter-16 91104 ns/op 142.95 MB/s 45861 B/op 964 allocs/op -BenchmarkSetOne_Parallel_Sonic-16 2065 ns/op 6305.03 MB/s 2383 B/op 17 allocs/op -BenchmarkSetOne_Parallel_Sjson-16 11526 ns/op 1129.87 MB/s 52175 B/op 9 allocs/op -BenchmarkSetOne_Parallel_Jsoniter-16 35044 ns/op 371.61 MB/s 45887 B/op 964 allocs/op +BenchmarkEncoder_Generic_Sonic-16 32393 ns/op 402.40 MB/s 11965 B/op 4 allocs/op +BenchmarkEncoder_Generic_Sonic_Fast-16 21668 ns/op 601.57 MB/s 10940 B/op 4 allocs/op +BenchmarkEncoder_Generic_JsonIter-16 42168 ns/op 309.12 MB/s 14345 B/op 115 allocs/op +BenchmarkEncoder_Generic_GoJson-16 65189 ns/op 199.96 MB/s 23261 B/op 16 allocs/op +BenchmarkEncoder_Generic_StdLib-16 106322 ns/op 122.60 MB/s 49136 B/op 789 allocs/op +BenchmarkEncoder_Binding_Sonic-16 6269 ns/op 2079.26 MB/s 14173 B/op 4 allocs/op +BenchmarkEncoder_Binding_Sonic_Fast-16 5281 ns/op 2468.16 MB/s 12322 B/op 4 allocs/op +BenchmarkEncoder_Binding_JsonIter-16 20056 ns/op 649.93 MB/s 9488 B/op 2 allocs/op +BenchmarkEncoder_Binding_GoJson-16 8311 ns/op 1568.32 MB/s 9481 B/op 1 allocs/op +BenchmarkEncoder_Binding_StdLib-16 16448 
ns/op 792.52 MB/s 9479 B/op 1 allocs/op +BenchmarkEncoder_Parallel_Generic_Sonic-16 6681 ns/op 1950.93 MB/s 12738 B/op 4 allocs/op +BenchmarkEncoder_Parallel_Generic_Sonic_Fast-16 4179 ns/op 3118.99 MB/s 10757 B/op 4 allocs/op +BenchmarkEncoder_Parallel_Generic_JsonIter-16 9861 ns/op 1321.84 MB/s 14362 B/op 115 allocs/op +BenchmarkEncoder_Parallel_Generic_GoJson-16 18850 ns/op 691.52 MB/s 23278 B/op 16 allocs/op +BenchmarkEncoder_Parallel_Generic_StdLib-16 45902 ns/op 283.97 MB/s 49174 B/op 789 allocs/op +BenchmarkEncoder_Parallel_Binding_Sonic-16 1480 ns/op 8810.09 MB/s 13049 B/op 4 allocs/op +BenchmarkEncoder_Parallel_Binding_Sonic_Fast-16 1209 ns/op 10785.23 MB/s 11546 B/op 4 allocs/op +BenchmarkEncoder_Parallel_Binding_JsonIter-16 6170 ns/op 2112.58 MB/s 9504 B/op 2 allocs/op +BenchmarkEncoder_Parallel_Binding_GoJson-16 3321 ns/op 3925.52 MB/s 9496 B/op 1 allocs/op +BenchmarkEncoder_Parallel_Binding_StdLib-16 3739 ns/op 3486.49 MB/s 9480 B/op 1 allocs/op + +BenchmarkDecoder_Generic_Sonic-16 66812 ns/op 195.10 MB/s 57602 B/op 723 allocs/op +BenchmarkDecoder_Generic_Sonic_Fast-16 54523 ns/op 239.07 MB/s 49786 B/op 313 allocs/op +BenchmarkDecoder_Generic_StdLib-16 124260 ns/op 104.90 MB/s 50869 B/op 772 allocs/op +BenchmarkDecoder_Generic_JsonIter-16 91274 ns/op 142.81 MB/s 55782 B/op 1068 allocs/op +BenchmarkDecoder_Generic_GoJson-16 88569 ns/op 147.17 MB/s 66367 B/op 973 allocs/op +BenchmarkDecoder_Binding_Sonic-16 32557 ns/op 400.38 MB/s 28302 B/op 137 allocs/op +BenchmarkDecoder_Binding_Sonic_Fast-16 28649 ns/op 455.00 MB/s 24999 B/op 34 allocs/op +BenchmarkDecoder_Binding_StdLib-16 111437 ns/op 116.97 MB/s 10576 B/op 208 allocs/op +BenchmarkDecoder_Binding_JsonIter-16 35090 ns/op 371.48 MB/s 14673 B/op 385 allocs/op +BenchmarkDecoder_Binding_GoJson-16 28738 ns/op 453.59 MB/s 22039 B/op 49 allocs/op +BenchmarkDecoder_Parallel_Generic_Sonic-16 12321 ns/op 1057.91 MB/s 57233 B/op 723 allocs/op +BenchmarkDecoder_Parallel_Generic_Sonic_Fast-16 10644 ns/op 1224.64 MB/s 49362 B/op 313 allocs/op +BenchmarkDecoder_Parallel_Generic_StdLib-16 57587 ns/op 226.35 MB/s 50874 B/op 772 allocs/op +BenchmarkDecoder_Parallel_Generic_JsonIter-16 38666 ns/op 337.12 MB/s 55789 B/op 1068 allocs/op +BenchmarkDecoder_Parallel_Generic_GoJson-16 30259 ns/op 430.79 MB/s 66370 B/op 974 allocs/op +BenchmarkDecoder_Parallel_Binding_Sonic-16 5965 ns/op 2185.28 MB/s 27747 B/op 137 allocs/op +BenchmarkDecoder_Parallel_Binding_Sonic_Fast-16 5170 ns/op 2521.31 MB/s 24715 B/op 34 allocs/op +BenchmarkDecoder_Parallel_Binding_StdLib-16 27582 ns/op 472.58 MB/s 10576 B/op 208 allocs/op +BenchmarkDecoder_Parallel_Binding_JsonIter-16 13571 ns/op 960.51 MB/s 14685 B/op 385 allocs/op +BenchmarkDecoder_Parallel_Binding_GoJson-16 10031 ns/op 1299.51 MB/s 22111 B/op 49 allocs/op + +BenchmarkGetOne_Sonic-16 11650 ns/op 1117.81 MB/s 29 B/op 1 allocs/op +BenchmarkGetOne_Gjson-16 9431 ns/op 1380.81 MB/s 0 B/op 0 allocs/op +BenchmarkGetOne_Jsoniter-16 51178 ns/op 254.46 MB/s 27936 B/op 647 allocs/op +BenchmarkGetOne_Parallel_Sonic-16 1955 ns/op 6659.94 MB/s 125 B/op 1 allocs/op +BenchmarkGetOne_Parallel_Gjson-16 1076 ns/op 12098.62 MB/s 0 B/op 0 allocs/op +BenchmarkGetOne_Parallel_Jsoniter-16 17741 ns/op 734.06 MB/s 27945 B/op 647 allocs/op +BenchmarkSetOne_Sonic-16 16124 ns/op 807.70 MB/s 1787 B/op 17 allocs/op +BenchmarkSetOne_Sjson-16 36456 ns/op 357.22 MB/s 52180 B/op 9 allocs/op +BenchmarkSetOne_Jsoniter-16 79475 ns/op 163.86 MB/s 45862 B/op 964 allocs/op +BenchmarkSetOne_Parallel_Sonic-16 2383 ns/op 5465.02 MB/s 2186 B/op 17 
allocs/op
+BenchmarkSetOne_Parallel_Sjson-16                     18194 ns/op    715.77 MB/s   52247 B/op     9 allocs/op
+BenchmarkSetOne_Parallel_Jsoniter-16                  33560 ns/op    388.05 MB/s   45892 B/op   964 allocs/op
 ```
 
 - [Small](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 keys, 3 layers)
 
 ![small benchmarks](bench-small.jpg)
diff --git a/ast/encode_test.go b/ast/encode_test.go
index 31faa29c3..677bd577b 100644
--- a/ast/encode_test.go
+++ b/ast/encode_test.go
@@ -144,7 +144,7 @@ func TestEncodeNode(t *testing.T) {
     }
 }
 
-func BenchmarkEncodeRaw(b *testing.B) {
+func BenchmarkEncodeRaw_Sonic(b *testing.B) {
     data := _TwitterJson
     root, e := NewSearcher(data).GetByPath()
     if e != nil {
@@ -164,7 +164,7 @@
     }
 }
 
-func BenchmarkEncodeSkip(b *testing.B) {
+func BenchmarkEncodeSkip_Sonic(b *testing.B) {
     data := _TwitterJson
     root, e := NewParser(data).Parse()
     if e != 0 {
@@ -185,7 +185,7 @@
     }
 }
 
-func BenchmarkEncodeLoad(b *testing.B) {
+func BenchmarkEncodeLoad_Sonic(b *testing.B) {
     data := _TwitterJson
     root, e := NewParser(data).Parse()
     if e != 0 {
diff --git a/bench.py b/bench.py
index 9e6c8cafd..c7e56fc45 100755
--- a/bench.py
+++ b/bench.py
@@ -28,6 +28,16 @@ def run(cmd):
         print ("Failed to run cmd: %s"%(cmd))
         exit(1)
 
+def run_s(cmd):
+    print (cmd)
+    res = os.popen(cmd)
+    out = res.read()
+    # os.popen() reports failure via close(), not an exception
+    if res.close() is not None:
+        print ("Failed to run cmd: %s"%(cmd))
+        exit(1)
+    return out.strip()
+
 def run_r(cmd):
     print (cmd)
     try:
@@ -41,12 +51,12 @@ def compare(args):
     # detect current branch.
-    result = run_r("git branch")
-    current_branch = None
-    for br in result.split('\n'):
-        if br.startswith("* "):
-            current_branch = br.lstrip('* ')
-            break
+    # result = run_r("git branch")
+    current_branch = run_s("git status | head -n1 | sed 's/On branch //'")
+    # for br in result.split('\n'):
+    #     if br.startswith("* "):
+    #         current_branch = br.lstrip('* ')
+    #         break
 
     if not current_branch:
         print ("Failed to detect current branch")
@@ -70,7 +80,7 @@ def compare(args):
     run("git checkout -- .")
     if current_branch != "main":
         run("git checkout main")
-        run("git pull origin main")
+        run("git pull --allow-unrelated-histories origin main")
 
     # benchmark main branch
     (fd, main) = tempfile.mkstemp(".main.txt")
@@ -93,12 +103,17 @@ def main():
                            help='Specify the filter for golang benchmark')
     argparser.add_argument('-c', '--compare', dest='compare', action='store_true', required=False,
                            help='Compare with the main benchmarking')
+    argparser.add_argument('-t', '--times', dest='times', required=False,
+                           help='Specify the benchmark times (passed to go test -benchtime)')
     args = argparser.parse_args()
 
     if args.filter:
         gbench_args = "-bench=%s"%(args.filter)
     else:
         gbench_args = "-bench=."
+ + if args.times: + gbench_args += " -benchtime=%s"%(args.times) if args.compare: target = compare(gbench_args) diff --git a/bench.sh b/bench.sh index d72ff5e1b..9680e3690 100644 --- a/bench.sh +++ b/bench.sh @@ -10,9 +10,9 @@ cd $pwd/decoder go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkDecoder_.*)$" cd $pwd/ast -go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkGet.*|BenchmarkEncode.*)$" +go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$" -go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*)$" +go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*|BenchmarkEncode.*)$" go test -benchmem -run=^$ -benchtime=10000000x -bench "^(BenchmarkNodeGetByPath|BenchmarkStructGetByPath|BenchmarkNodeIndex|BenchmarkStructIndex|BenchmarkSliceIndex|BenchmarkMapIndex|BenchmarkNodeGet|BenchmarkSliceGet|BenchmarkMapGet|BenchmarkNodeSet|BenchmarkMapSet|BenchmarkNodeSetByIndex|BenchmarkSliceSetByIndex|BenchmarkStructSetByIndex|BenchmarkNodeUnset|BenchmarkMapUnset|BenchmarkNodUnsetByIndex|BenchmarkSliceUnsetByIndex|BenchmarkNodeAdd|BenchmarkSliceAdd|BenchmarkMapAdd)$" diff --git a/internal/native/avx/native_amd64.s b/internal/native/avx/native_amd64.s index f3d8095a0..ecfc14bda 100644 --- a/internal/native/avx/native_amd64.s +++ b/internal/native/avx/native_amd64.s @@ -15,89 +15,75 @@ _lzero: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp CMPQ SI, $32 - JB LBB0_5 - LEAQ -32(SI), AX - MOVQ AX, CX - ANDQ $-32, CX - LEAQ 32(CX)(DI*1), CX - ANDL $31, AX - -LBB0_2: + JB LBB0_3 + +LBB0_1: LONG $0x076ffec5 // vmovdqu (%rdi), %ymm0 LONG $0x177de2c4; BYTE $0xc0 // vptest %ymm0, %ymm0 - JNE LBB0_13 + JNE LBB0_9 ADDQ $32, DI ADDQ $-32, SI CMPQ SI, $31 - JA LBB0_2 - MOVQ AX, SI - MOVQ CX, DI + JA LBB0_1 -LBB0_5: +LBB0_3: WORD $0xf8c5; BYTE $0x77 // vzeroupper CMPQ SI, $16 - JB LBB0_10 - LEAQ -16(SI), AX - MOVQ AX, CX - ANDQ $-16, CX - LEAQ 16(CX)(DI*1), CX - ANDL $15, AX - -LBB0_7: + JB LBB0_6 + +LBB0_4: LONG $0x076ffac5 // vmovdqu (%rdi), %xmm0 LONG $0x1779e2c4; BYTE $0xc0 // vptest %xmm0, %xmm0 - JNE LBB0_14 + JNE LBB0_10 ADDQ $16, DI ADDQ $-16, SI CMPQ SI, $15 - JA LBB0_7 - MOVQ AX, SI - MOVQ CX, DI + JA LBB0_4 -LBB0_10: +LBB0_6: CMPQ SI, $8 - JB LBB0_16 + JB LBB0_12 MOVL $1, AX CMPQ 0(DI), $0 - JNE LBB0_12 + JNE LBB0_8 ADDQ $8, DI ADDQ $-8, SI -LBB0_16: +LBB0_12: CMPQ SI, $4 - JB LBB0_19 + JB LBB0_15 MOVL $1, AX CMPL 0(DI), $0 - JNE LBB0_12 + JNE LBB0_8 ADDQ $4, DI ADDQ $-4, SI -LBB0_19: +LBB0_15: CMPQ SI, $2 - JB LBB0_22 + JB LBB0_18 MOVL $1, AX CMPW 0(DI), $0 - JNE LBB0_12 + JNE LBB0_8 ADDQ $2, DI ADDQ $-2, SI -LBB0_22: +LBB0_18: XORL AX, AX TESTQ SI, SI - JE LBB0_12 + JE LBB0_8 CMPB 0(DI), $0 SETNE AX BYTE $0x5d // popq %rbp RET -LBB0_12: +LBB0_8: BYTE $0x5d // popq %rbp RET -LBB0_13: +LBB0_9: WORD $0xf8c5; BYTE $0x77 // vzeroupper -LBB0_14: +LBB0_10: MOVL $1, AX BYTE $0x5d // popq %rbp RET @@ -117,111 +103,108 @@ LCPI1_3: _lspace: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - LEAQ 0(DI)(DX*1), AX - SUBQ DX, SI + MOVQ DX, AX + LEAQ 0(DI)(DX*1), R10 + MOVQ SI, DX + SUBQ AX, DX JE LBB1_7 - MOVL AX, CX + MOVL R10, CX ANDL $15, CX TESTQ CX, CX JE LBB1_7 - LEAQ -1(SI), R9 - XORL DX, DX + LEAQ 1(DI), R9 + LEAQ -1(SI), CX MOVQ $4294977024, R8 LBB1_3: - MOVBLSX 0(AX)(DX*1), CX - CMPL CX, $32 - JA LBB1_5 - BTQ CX, R8 - JAE LBB1_5 - LEAQ 1(DX), R10 - CMPQ R9, DX + MOVBLSX 0(DI)(AX*1), DX + CMPL DX, $32 + JA LBB1_22 + BTQ DX, R8 + JAE LBB1_22 + LEAQ 1(AX), DX + CMPQ CX, AX 
JE LBB1_6 - LEAQ 1(AX)(DX*1), CX - ANDL $15, CX - MOVQ R10, DX - TESTQ CX, CX + ADDL R9, AX + ANDL $15, AX + TESTQ AX, AX + MOVQ DX, AX JNE LBB1_3 LBB1_6: - ADDQ R10, AX - SUBQ R10, SI + LEAQ 0(DI)(DX*1), R10 + SUBQ DX, SI + MOVQ SI, DX LBB1_7: - CMPQ SI, $16 + CMPQ DX, $16 JB LBB1_13 - LEAQ -16(SI), CX - MOVQ CX, DX - ANDQ $-16, DX - LEAQ 16(DX)(AX*1), R8 - ANDL $15, CX - QUAD $0xffffff42056ffac5 // vmovdqu $-190(%rip), %xmm0 /* LCPI1_0(%rip) */ - QUAD $0xffffff4a0d6ffac5 // vmovdqu $-182(%rip), %xmm1 /* LCPI1_1(%rip) */ - QUAD $0xffffff52156ffac5 // vmovdqu $-174(%rip), %xmm2 /* LCPI1_2(%rip) */ - QUAD $0xffffff5a1d6ffac5 // vmovdqu $-166(%rip), %xmm3 /* LCPI1_3(%rip) */ + MOVQ DI, SI + SUBQ R10, SI + QUAD $0xffffff44056ffac5 // vmovdqu $-188(%rip), %xmm0 /* LCPI1_0(%rip) */ + QUAD $0xffffff4c0d6ffac5 // vmovdqu $-180(%rip), %xmm1 /* LCPI1_1(%rip) */ + QUAD $0xffffff54156ffac5 // vmovdqu $-172(%rip), %xmm2 /* LCPI1_2(%rip) */ + QUAD $0xffffff5c1d6ffac5 // vmovdqu $-164(%rip), %xmm3 /* LCPI1_3(%rip) */ LBB1_9: - LONG $0x206ff9c5 // vmovdqa (%rax), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 - LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 - LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 - LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 - LONG $0xd4d7f9c5 // vpmovmskb %xmm4, %edx - CMPW DX, $-1 + LONG $0x6f79c1c4; BYTE $0x22 // vmovdqa (%r10), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 + LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 + LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 + LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 + LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax + CMPW AX, $-1 JNE LBB1_10 - ADDQ $16, AX + ADDQ $16, R10 + ADDQ $-16, DX ADDQ $-16, SI - CMPQ SI, $15 + CMPQ DX, $15 JA LBB1_9 - MOVQ CX, SI - MOVQ R8, AX LBB1_13: - TESTQ SI, SI - JE LBB1_22 - LEAQ 0(AX)(SI*1), R8 - INCQ AX - MOVQ $4294977024, DX + TESTQ DX, DX + JE LBB1_20 + LEAQ 0(R10)(DX*1), R8 + XORL AX, AX + MOVQ $4294977024, R9 LBB1_15: - MOVBLSX -1(AX), CX - CMPL CX, $32 + MOVBLSX 0(R10)(AX*1), SI + CMPL SI, $32 JA LBB1_17 - BTQ CX, DX + BTQ SI, R9 JAE LBB1_17 - DECQ SI - INCQ AX - TESTQ SI, SI + ADDQ $1, AX + CMPQ DX, AX JNE LBB1_15 - MOVQ R8, AX - JMP LBB1_22 + MOVQ R8, R10 -LBB1_10: - MOVWLZX DX, CX - SUBQ DI, AX - NOTL CX - BSFL CX, CX - ADDQ CX, AX - BYTE $0x5d // popq %rbp - RET +LBB1_20: + SUBQ DI, R10 -LBB1_5: - ADDQ DX, AX +LBB1_21: + MOVQ R10, AX LBB1_22: - SUBQ DI, AX - BYTE $0x5d // popq %rbp + BYTE $0x5d // popq %rbp RET -LBB1_17: - NOTQ DI - ADDQ DI, AX - BYTE $0x5d // popq %rbp +LBB1_10: + MOVWLZX AX, AX + NOTL AX + BSFL AX, AX + SUBQ SI, AX + BYTE $0x5d // popq %rbp RET +LBB1_17: + SUBQ DI, R10 + ADDQ AX, R10 + JMP LBB1_21 + LCPI2_0: QUAD $0x3030303030303030; QUAD $0x3030303030303030 // .space 16, '0000000000000000' QUAD $0x3030303030303030; QUAD $0x3030303030303030 // .space 16, '0000000000000000' @@ -234,585 +217,570 @@ _f64toa: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVQ DI, R13 + SUBQ $56, SP + MOVQ DI, R12 LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax XORL R9, R9 TESTQ AX, AX - JS LBB2_108 + JS LBB2_1 TESTQ AX, AX - JE LBB2_109 + JE LBB2_3 -LBB2_2: +LBB2_4: MOVQ AX, DX SHRQ $52, DX XORL BX, BX CMPL DX, $2047 - JE 
LBB2_107 + JE LBB2_105 MOVQ $4503599627370495, DI ANDQ DI, AX - INCQ DI + ADDQ $1, DI ORQ AX, DI LEAL -1023(DX), CX CMPL CX, $52 MOVL R9, -44(BP) - MOVQ R13, -64(BP) - JA LBB2_5 + MOVQ R12, -56(BP) + JA LBB2_8 MOVL $1075, CX SUBQ DX, CX MOVQ $-1, SI SHLQ CX, SI NOTQ SI TESTQ SI, DI - JE LBB2_14 + JE LBB2_7 -LBB2_5: +LBB2_8: TESTL DX, DX LONG $0xf8440f48 // cmoveq %rax, %rdi LEAL -1077(DX), CX - MOVL $-1076, R11 - LONG $0xd9450f44 // cmovnel %ecx, %r11d - MOVQ DI, -72(BP) - LEAQ 0(DI*4), R8 + MOVL $-1076, BX + WORD $0x450f; BYTE $0xd9 // cmovnel %ecx, %ebx + MOVQ DI, -64(BP) + LEAQ 0(DI*4), R10 TESTQ AX, AX SETNE AX CMPL DX, $2 SETCS R13 ORB AX, R13 - MOVBLZX R13, R9 - TESTL R11, R11 - JS LBB2_12 - LONG $0x41e36945; WORD $0x0134; BYTE $0x00 // imull $78913, %r11d, %r12d - SHRL $18, R12 + MOVBLZX R13, R15 + TESTL BX, BX + JS LBB2_22 + LONG $0x41eb6944; WORD $0x0134; BYTE $0x00 // imull $78913, %ebx, %r13d + SHRL $18, R13 XORL AX, AX - CMPL R11, $3 - SETGT AX - SUBL AX, R12 - LONG $0x4fdc6941; WORD $0x1293; BYTE $0x00 // imull $1217359, %r12d, %ebx - MOVQ R12, AX + CMPL BX, $4 + SETGE AX + SUBL AX, R13 + LONG $0x4ff56941; WORD $0x1293; BYTE $0x00 // imull $1217359, %r13d, %esi + MOVQ R13, AX SHLQ $4, AX - LONG $0x510d8d48; WORD $0x007e; BYTE $0x00 // leaq $32337(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ - MOVQ R8, DI - ORQ $2, DI - MOVQ 0(AX)(CX*1), R10 + LONG $0x540d8d48; WORD $0x0081; BYTE $0x00 // leaq $33108(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ + MOVQ R10, R12 + ORQ $2, R12 + MOVQ 0(AX)(CX*1), R11 MOVQ 8(AX)(CX*1), R14 - MOVQ R10, AX - MULQ DI + MOVQ R11, AX + MULQ R12 MOVQ DX, CX MOVQ R14, AX - MOVQ R14, -56(BP) - MULQ DI - MOVQ AX, R13 - MOVQ DX, SI - SHRL $19, BX - ADDQ CX, R13 - ADCQ $0, SI - MOVL R12, CX - SUBL R11, CX - ADDL BX, CX + MULQ R12 + MOVQ AX, R8 + MOVQ DX, DI + SHRL $19, SI + ADDQ CX, R8 + ADCQ $0, DI + MOVL R13, CX + SUBL BX, CX + ADDL SI, CX ADDB $61, CX - LONG $0xf5ad0f49 // shrdq %cl, %rsi, %r13 - SHRQ CX, SI - NOTQ R9 - ADDQ R8, R9 - MOVQ R10, AX - MULQ R9 - MOVQ DX, R15 + LONG $0xf8ad0f49 // shrdq %cl, %rdi, %r8 + SHRQ CX, DI + NOTQ R15 + ADDQ R10, R15 + MOVQ R11, AX + MULQ R15 + MOVQ DX, BX + MOVQ R14, AX + MULQ R15 + MOVQ DX, R9 + MOVQ AX, SI + ADDQ BX, SI + ADCQ $0, R9 + LONG $0xcead0f4c // shrdq %cl, %r9, %rsi + SHRQ CX, R9 + MOVQ R11, AX + MULQ R10 + MOVQ DX, R11 MOVQ R14, AX - MULQ R9 + MULQ R10 MOVQ DX, R14 - MOVQ AX, BX - ADDQ R15, BX + ADDQ R11, AX ADCQ $0, R14 - LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + LONG $0xf0ad0f4c // shrdq %cl, %r14, %rax SHRQ CX, R14 - MOVQ R10, AX - MULQ R8 - MOVQ DX, R11 - MOVQ -56(BP), AX - MULQ R8 - MOVQ DX, R10 - ADDQ R11, AX - ADCQ $0, R10 - LONG $0xd0ad0f4c // shrdq %cl, %r10, %rax - SHRQ CX, R10 TESTB $64, CX - LONG $0xf5440f49 // cmoveq %r13, %rsi - LONG $0xf3440f4c // cmoveq %rbx, %r14 - LONG $0xd0440f4c // cmoveq %rax, %r10 - CMPL R12, $21 - JA LBB2_23 + LONG $0xf8440f49 // cmoveq %r8, %rdi + LONG $0xce440f4c // cmoveq %rsi, %r9 + LONG $0xf0440f4c // cmoveq %rax, %r14 + CMPL R13, $21 + JA LBB2_10 MOVQ $-3689348814741910323, AX MOVQ $3689348814741910323, CX - MOVQ R8, DX + MOVQ R10, DX IMULQ AX, DX CMPQ DX, CX - JBE LBB2_17 - TESTB $1, -72(BP) - JNE LBB2_20 + JBE LBB2_12 + TESTB $1, -64(BP) + JNE LBB2_16 MOVL $-1, DX -LBB2_10: - IMULQ AX, R9 - INCL DX - CMPQ R9, CX - JBE LBB2_10 - CMPL DX, R12 - SETCC R13 +LBB2_20: + IMULQ AX, R15 + ADDL $1, DX + CMPQ R15, CX + JBE LBB2_20 + CMPL DX, R13 + SETCC R11 XORL R15, R15 - TESTB R15, R15 - JE LBB2_26 - JMP LBB2_29 + TESTB R11, R11 + JE LBB2_29 + JMP LBB2_30 
-LBB2_12: - LONG $0x05e36945; WORD $0xf4d1; BYTE $0xff // imull $-732923, %r11d, %r12d - SHRL $20, R12 +LBB2_22: + LONG $0x05c36944; WORD $0xf4d1; BYTE $0xff // imull $-732923, %ebx, %r8d + SHRL $20, R8 XORL AX, AX - CMPL R11, $-1 + CMPL BX, $-1 SETNE AX - SUBL AX, R12 - ADDL R12, R11 - MOVL R11, AX + SUBL AX, R8 + ADDL R8, BX + MOVL BX, AX NEGL AX - LONG $0xb1db6941; WORD $0xed6c; BYTE $0xff // imull $-1217359, %r11d, %ebx - SHRL $19, BX - MOVLQSX AX, SI - SHLQ $4, SI - LONG $0x7c158d4c; WORD $0x0092; BYTE $0x00 // leaq $37500(%rip), %r10 /* _DOUBLE_POW5_SPLIT(%rip) */ - MOVQ R8, DI - ORQ $2, DI - MOVQ 0(SI)(R10*1), R14 - MOVQ R14, AX - MOVQ R14, -56(BP) - MULQ DI - MOVQ DX, CX - MOVQ 8(SI)(R10*1), R10 - MOVQ R10, AX - MULQ DI + LONG $0x6cb1f369; WORD $0xffed // imull $-1217359, %ebx, %esi + SHRL $19, SI + MOVLQSX AX, DI + SHLQ $4, DI + LONG $0x871d8d4c; WORD $0x0095; BYTE $0x00 // leaq $38279(%rip), %r11 /* _DOUBLE_POW5_SPLIT(%rip) */ + MOVQ R10, CX + ORQ $2, CX + MOVQ 0(DI)(R11*1), R9 + MOVQ R9, AX + MOVQ R9, -80(BP) + MULQ CX + MOVQ DX, R14 + MOVQ 8(DI)(R11*1), AX + MOVQ AX, -88(BP) + MULQ CX MOVQ DX, DI - MOVQ AX, SI - ADDQ CX, SI + MOVQ AX, R11 + ADDQ R14, R11 ADCQ $0, DI - MOVL R12, CX - SUBL BX, CX + MOVL R8, CX + SUBL SI, CX ADDB $60, CX - LONG $0xfead0f48 // shrdq %cl, %rdi, %rsi + LONG $0xfbad0f49 // shrdq %cl, %rdi, %r11 SHRQ CX, DI - NOTQ R9 - ADDQ R8, R9 + NOTQ R15 + ADDQ R10, R15 + MOVQ R9, AX + MULQ R15 + MOVQ DX, -72(BP) + MOVQ -88(BP), R14 MOVQ R14, AX - MULQ R9 + MULQ R15 + MOVQ DX, R9 + MOVQ AX, SI + ADDQ -72(BP), SI + ADCQ $0, R9 + LONG $0xcead0f4c // shrdq %cl, %r9, %rsi + SHRQ CX, R9 + MOVQ -80(BP), AX + MULQ R10 MOVQ DX, R15 - MOVQ R10, AX - MULQ R9 + MOVQ R14, AX + MULQ R10 MOVQ DX, R14 - MOVQ AX, BX - ADDQ R15, BX + ADDQ R15, AX ADCQ $0, R14 - LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + LONG $0xf0ad0f4c // shrdq %cl, %r14, %rax SHRQ CX, R14 - MOVQ -56(BP), AX - MULQ R8 - MOVQ DX, R15 - MOVQ R10, AX - MULQ R8 - MOVQ DX, R10 - ADDQ R15, AX - ADCQ $0, R10 - LONG $0xd0ad0f4c // shrdq %cl, %r10, %rax - SHRQ CX, R10 TESTB $64, CX - LONG $0xfe440f48 // cmoveq %rsi, %rdi - MOVQ DI, SI - LONG $0xf3440f4c // cmoveq %rbx, %r14 - LONG $0xd0440f4c // cmoveq %rax, %r10 - CMPL R12, $1 - JA LBB2_15 - MOVQ -72(BP), AX + LONG $0xfb440f49 // cmoveq %r11, %rdi + LONG $0xce440f4c // cmoveq %rsi, %r9 + LONG $0xf0440f4c // cmoveq %rax, %r14 + CMPL R8, $1 + JA LBB2_24 + MOVQ -64(BP), AX ANDL $1, AX TESTQ AX, AX - SETEQ CX - ANDB CX, R13 - SUBQ AX, SI + SETEQ R11 + ANDB R13, R11 + SUBQ AX, DI MOVB $1, R15 - MOVL R11, AX - MOVQ AX, -56(BP) + MOVL BX, R13 JMP LBB2_30 -LBB2_14: +LBB2_7: MOVL CX, CX SHRQ CX, DI - XORL R12, R12 + XORL R13, R13 MOVL $1, R15 CMPQ DI, $10 - JAE LBB2_46 - JMP LBB2_61 + JAE LBB2_52 + JMP LBB2_67 -LBB2_15: - CMPL R12, $62 - JA LBB2_24 +LBB2_24: + CMPL R8, $62 + JA LBB2_25 MOVQ $-1, AX - MOVL R12, CX + MOVL R8, CX SHLQ CX, AX NOTQ AX - TESTQ AX, R8 + TESTQ AX, R10 SETEQ R15 - JMP LBB2_25 + JMP LBB2_27 -LBB2_17: +LBB2_12: MOVL $-1, DX -LBB2_18: - IMULQ AX, R8 - INCL DX - CMPQ R8, CX - JBE LBB2_18 - CMPL DX, R12 +LBB2_13: + IMULQ AX, R10 + ADDL $1, DX + CMPQ R10, CX + JBE LBB2_13 + CMPL DX, R13 SETCC R15 - XORL R13, R13 - TESTB R15, R15 - JE LBB2_26 - JMP LBB2_29 + XORL R11, R11 + TESTB R11, R11 + JE LBB2_29 + JMP LBB2_30 -LBB2_20: +LBB2_16: MOVL $-1, DX -LBB2_21: - IMULQ AX, DI - INCL DX - CMPQ DI, CX - JBE LBB2_21 - XORL AX, AX - CMPL DX, R12 - SETCC AX - SUBQ AX, SI +LBB2_17: + IMULQ AX, R12 + ADDL $1, DX + CMPQ R12, CX + JBE LBB2_17 + CMPL DX, R13 + ADCQ $-1, DI 
-LBB2_23: +LBB2_10: XORL R15, R15 - XORL R13, R13 - TESTB R15, R15 - JE LBB2_26 - JMP LBB2_29 + XORL R11, R11 + TESTB R11, R11 + JE LBB2_29 + JMP LBB2_30 -LBB2_24: +LBB2_25: XORL R15, R15 -LBB2_25: - XORL R13, R13 - MOVL R11, R12 - TESTB R15, R15 - JNE LBB2_29 +LBB2_27: + XORL R11, R11 + MOVL BX, R13 + TESTB R11, R11 + JNE LBB2_30 -LBB2_26: - TESTB R13, R13 - JNE LBB2_29 - MOVQ SI, BX - SHRQ $2, SI +LBB2_29: + TESTB R15, R15 + JNE LBB2_30 + MOVQ DI, AX + SHRQ $2, AX + MOVQ DI, BX MOVQ $2951479051793528259, DI - MOVQ SI, AX MULQ DI - MOVQ DX, CX - SHRQ $2, CX - MOVQ R14, AX + MOVQ DX, SI + MOVQ R9, AX SHRQ $2, AX MULQ DI - MOVQ DX, SI + MOVQ DX, CX SHRQ $2, SI - CMPQ CX, SI - JBE LBB2_39 - MOVQ R10, AX + SHRQ $2, CX + XORL R8, R8 + CMPQ SI, CX + JBE LBB2_44 + MOVQ R14, AX SHRQ $2, AX MULQ DI SHRQ $2, DX WORD $0xc26b; BYTE $0x9c // imull $-100, %edx, %eax - ADDL R10, AX - XORL DI, DI - CMPL AX, $49 - SETHI DI - MOVL $2, R11 - MOVQ SI, R14 - MOVQ CX, AX - MOVQ DX, R10 - MOVQ -64(BP), R13 - JMP LBB2_40 - -LBB2_29: - MOVQ R12, -56(BP) + ADDL R14, AX + CMPL AX, $50 + SETCC DI + MOVL $2, R8 + MOVQ DX, R14 + JMP LBB2_46 LBB2_30: - MOVQ $-3689348814741910323, BX - MOVQ SI, AX - MULQ BX - MOVQ DX, R12 - MOVQ R14, AX - MULQ BX - SHRQ $3, R12 + MOVQ $-3689348814741910323, R12 + MOVQ DI, AX + MULQ R12 + MOVQ DX, DI + MOVQ R9, AX + MULQ R12 + SHRQ $3, DI SHRQ $3, DX - XORL SI, SI - XORL R11, R11 - CMPQ R12, DX - JBE LBB2_37 XORL CX, CX + XORL R8, R8 + CMPQ DI, DX + JBE LBB2_31 + XORL BX, BX -LBB2_32: - MOVQ DX, R8 - LEAL 0(DX)(DX*1), DI - MOVQ R10, AX - MULQ BX - MOVQ DX, R9 - LEAL 0(DI)(DI*4), AX - SHRQ $3, R9 - LEAL 0(R9)(R9*1), DX - LEAL 0(DX)(DX*4), SI - NEGL SI - ADDB R10, SI - CMPL R14, AX +LBB2_33: + MOVQ DX, R10 + LEAL 0(DX)(DX*1), CX + MOVQ R14, AX + MULQ R12 + MOVQ DX, SI + LEAL 0(CX)(CX*4), AX + SHRQ $3, SI + LEAL 0(SI)(SI*1), CX + LEAL 0(CX)(CX*4), CX + NEGL CX + ADDB R14, CX + CMPL AX, R9 SETEQ AX - ANDB AX, R13 - TESTB CX, CX + ANDB AX, R11 + TESTB BX, BX SETEQ AX ANDB AX, R15 - INCL R11 - MOVQ R12, AX - MULQ BX - MOVQ DX, R12 - SHRQ $3, R12 - MOVQ R8, AX - MULQ BX + ADDL $1, R8 + MOVQ DI, AX + MULQ R12 + MOVQ DX, DI + SHRQ $3, DI + MOVQ R10, AX + MULQ R12 SHRQ $3, DX + MOVQ SI, R14 + MOVQ R10, R9 + MOVL CX, BX + CMPQ DI, DX + JA LBB2_33 + TESTB R11, R11 + JNE LBB2_36 + JMP LBB2_35 + +LBB2_31: MOVQ R9, R10 - MOVQ R8, R14 - MOVL SI, CX - CMPQ R12, DX - JA LBB2_32 - TESTB R13, R13 - JE LBB2_38 + MOVQ R14, SI + TESTB R11, R11 + JE LBB2_35 -LBB2_34: - MOVQ R8, AX - MULQ BX - MOVQ DX, CX - SHRQ $3, CX - LEAL 0(CX)(CX*1), AX +LBB2_36: + MOVQ R10, AX + MULQ R12 + MOVQ DX, DI + SHRQ $3, DI + LEAL 0(DI)(DI*1), AX LEAL 0(AX)(AX*4), AX - CMPL R8, AX - MOVQ -72(BP), DI - MOVQ -56(BP), R12 - JNE LBB2_44 - MOVQ BX, R14 + CMPL AX, R10 + JNE LBB2_35 -LBB2_36: - MOVQ R9, AX - MULQ R14 - MOVQ DX, R10 - MOVQ CX, R8 - SHRQ $3, R10 - LEAL 0(R10)(R10*1), AX +LBB2_37: + MOVQ SI, AX + MULQ R12 + MOVQ DX, R9 + MOVQ DI, R10 + SHRQ $3, R9 + LEAL 0(R9)(R9*1), AX LEAL 0(AX)(AX*4), BX NEGL BX - ADDB R9, BX - TESTB SI, SI + ADDB SI, BX + TESTB CX, CX SETEQ AX ANDB AX, R15 - INCL R11 - MOVQ CX, AX - MULQ R14 - MOVQ DX, CX - SHRQ $3, CX - LEAL 0(CX)(CX*1), AX + ADDL $1, R8 + MOVQ DI, AX + MULQ R12 + MOVQ DX, DI + SHRQ $3, DI + LEAL 0(DI)(DI*1), AX LEAL 0(AX)(AX*4), AX - MOVQ R10, R9 - MOVL BX, SI - CMPL R8, AX - JE LBB2_36 - JMP LBB2_45 + MOVQ R9, SI + MOVL BX, CX + CMPL AX, R10 + JE LBB2_37 + JMP LBB2_38 -LBB2_37: - MOVQ R14, R8 - MOVQ R10, R9 - TESTB R13, R13 - JNE LBB2_34 +LBB2_35: + MOVL CX, BX + MOVQ SI, R9 LBB2_38: 
- MOVL SI, BX - MOVQ R9, R10 - MOVQ -72(BP), DI - MOVQ -56(BP), R12 - JMP LBB2_45 + TESTB R15, R15 + SETEQ CX + CMPB BX, $5 + SETNE AX + CMPQ R9, R10 + MOVQ -56(BP), R12 + JNE LBB2_41 + MOVL $1, DI + TESTB $1, -64(BP) + JNE LBB2_42 + TESTB R11, R11 + JE LBB2_42 + +LBB2_41: + MOVL R9, DX + ANDB $1, DX + ORB DX, CX + ORB CX, AX + CMPB BX, $5 + SETCC CX + ANDB AX, CX + MOVBLZX CX, DI + +LBB2_42: + ADDQ R9, DI + JMP LBB2_50 -LBB2_39: - XORL R11, R11 +LBB2_44: XORL DI, DI - MOVQ -64(BP), R13 - MOVQ BX, AX + MOVQ R9, CX + MOVQ BX, SI -LBB2_40: - MOVQ $-3689348814741910323, BX - MULQ BX - MOVQ DX, CX - SHRQ $3, CX - MOVQ R14, AX - MULQ BX +LBB2_46: + MOVQ -56(BP), R12 + MOVQ $-3689348814741910323, R9 + MOVQ SI, AX + MULQ R9 + MOVQ DX, SI + SHRQ $3, SI + MOVQ CX, AX + MULQ R9 SHRQ $3, DX - CMPQ CX, DX - JBE LBB2_43 + CMPQ SI, DX + JBE LBB2_49 -LBB2_41: - MOVQ R10, SI - MOVQ DX, R14 - MOVQ R10, AX - MULQ BX - MOVQ DX, R10 - SHRQ $3, R10 - INCL R11 - MOVQ CX, AX - MULQ BX +LBB2_47: + MOVQ R14, DI MOVQ DX, CX - SHRQ $3, CX MOVQ R14, AX - MULQ BX + MULQ R9 + MOVQ DX, R14 + SHRQ $3, R14 + ADDL $1, R8 + MOVQ SI, AX + MULQ R9 + MOVQ DX, SI + SHRQ $3, SI + MOVQ CX, AX + MULQ R9 SHRQ $3, DX - CMPQ CX, DX - JA LBB2_41 - LEAL 0(R10)(R10*1), AX + CMPQ SI, DX + JA LBB2_47 + LEAL 0(R14)(R14*1), AX LEAL 0(AX)(AX*4), AX - SUBL AX, SI - XORL DI, DI - CMPL SI, $4 - SETHI DI - -LBB2_43: - XORL AX, AX - CMPQ R10, R14 - SETEQ AX - ORQ DI, AX - ADDQ R10, AX - MOVQ AX, DI - ADDL R11, R12 - MOVL $1, R15 - CMPQ DI, $10 - JAE LBB2_46 - JMP LBB2_61 - -LBB2_44: - MOVL SI, BX - MOVQ R9, R10 - -LBB2_45: - TESTB R13, R13 - SETEQ SI - TESTB R15, R15 - SETNE AX - CMPB BX, $5 - SETEQ DX - TESTB $1, R10 - SETEQ CX - ANDB AX, CX - ANDB DX, CX - CMPQ R10, R8 - SETEQ DX - ORB SI, DI - CMPB BX, $4 - SETHI AX - XORB CX, AX - ANDB DX, DI - ORB AX, DI - MOVBLZX DI, DI - ADDQ R10, DI - MOVQ -64(BP), R13 - ADDL R11, R12 - MOVL $1, R15 - CMPQ DI, $10 - JB LBB2_61 + SUBL AX, DI + CMPL DI, $5 + SETCC DI + +LBB2_49: + CMPQ R14, CX + SETEQ AX + TESTB DI, DI + SETNE CX + ORB AX, CX + MOVBLZX CX, DI + ADDQ R14, DI + +LBB2_50: + ADDL R8, R13 + MOVL $1, R15 + CMPQ DI, $10 + JB LBB2_67 -LBB2_46: +LBB2_52: MOVL $2, R15 CMPQ DI, $100 - JB LBB2_61 + JB LBB2_67 MOVL $3, R15 CMPQ DI, $1000 - JB LBB2_61 + JB LBB2_67 MOVL $4, R15 CMPQ DI, $10000 - JB LBB2_61 + JB LBB2_67 MOVL $5, R15 CMPQ DI, $100000 - JB LBB2_61 + JB LBB2_67 MOVL $6, R15 CMPQ DI, $1000000 - JB LBB2_61 + JB LBB2_67 MOVL $7, R15 CMPQ DI, $10000000 - JB LBB2_61 + JB LBB2_67 MOVL $8, R15 CMPQ DI, $100000000 - JB LBB2_61 + JB LBB2_67 MOVL $9, R15 CMPQ DI, $1000000000 - JB LBB2_61 + JB LBB2_67 MOVQ $8589934464, AX ADDQ $1410065536, AX MOVL $10, R15 CMPQ DI, AX - JB LBB2_61 + JB LBB2_67 MOVQ DI, AX SHRQ $11, AX MOVL $11, R15 CMPQ AX, $48828125 - JB LBB2_61 + JB LBB2_67 MOVQ DI, AX SHRQ $12, AX MOVL $12, R15 CMPQ AX, $244140625 - JB LBB2_61 + JB LBB2_67 MOVQ DI, AX SHRQ $13, AX MOVL $13, R15 CMPQ AX, $1220703125 - JB LBB2_61 + JB LBB2_67 MOVL $14, R15 MOVQ $100000000000000, AX CMPQ DI, AX - JB LBB2_61 + JB LBB2_67 MOVL $15, R15 MOVQ $1000000000000000, AX CMPQ DI, AX - JB LBB2_61 + JB LBB2_67 MOVQ $10000000000000000, AX CMPQ DI, AX MOVL $17, R15 SBBL $0, R15 -LBB2_61: - LEAL 0(R15)(R12*1), R14 - LEAL 5(R15)(R12*1), AX +LBB2_67: + LEAL 0(R15)(R13*1), R14 + LEAL 0(R15)(R13*1), AX + ADDL $5, AX CMPL AX, $27 - JB LBB2_67 - LEAQ 1(R13), BX + JB LBB2_78 + LEAQ 1(R12), BX MOVQ BX, SI MOVL R15, DX - LONG $0x0047e3e8; BYTE $0x00 // callq _print_mantissa - MOVB 1(R13), AX - MOVB AX, 0(R13) + LONG 
$0x004adae8; BYTE $0x00 // callq _print_mantissa + MOVB 1(R12), AX + MOVB AX, 0(R12) MOVL $1, AX CMPL R15, $2 - JB LBB2_64 + JB LBB2_70 MOVB $46, 0(BX) - INCL R15 + ADDL $1, R15 MOVL R15, AX -LBB2_64: +LBB2_70: MOVL AX, BX - MOVB $101, 0(R13)(BX*1) - INCQ BX + MOVB $101, 0(R12)(BX*1) + ADDQ $1, BX TESTL R14, R14 - JLE LBB2_69 - DECL R14 + JLE LBB2_72 + ADDL $-1, R14 MOVL -44(BP), R9 CMPL R14, $100 - JL LBB2_70 + JL LBB2_75 -LBB2_66: +LBB2_74: MOVL R14, AX MOVL $3435973837, CX IMULQ AX, CX @@ -820,293 +788,230 @@ LBB2_66: LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX SUBL AX, R14 - LONG $0x4e058d48; WORD $0x00a1; BYTE $0x00 // leaq $41294(%rip), %rax /* _Digits(%rip) */ + LONG $0x97058d48; WORD $0x00a4; BYTE $0x00 // leaq $42135(%rip), %rax /* _Digits(%rip) */ MOVWLZX 0(AX)(CX*2), AX MOVL BX, CX - MOVW AX, 0(R13)(CX*1) + MOVW AX, 0(R12)(CX*1) ORB $48, R14 - MOVLQSX BX, AX - MOVB R14, 2(AX)(R13*1) + MOVB R14, 2(R12)(CX*1) ADDL $3, BX - JMP LBB2_107 + JMP LBB2_105 -LBB2_67: +LBB2_78: TESTL R14, R14 - JLE LBB2_72 - MOVL R12, R13 - SARL $31, R13 - ANDL R14, R13 + JLE LBB2_82 + MOVL R13, R12 + SARL $31, R12 + ANDL R14, R12 XORL BX, BX - TESTL R12, R12 - LONG $0xe3480f44 // cmovsl %ebx, %r12d - JMP LBB2_74 + TESTL R13, R13 + LONG $0xeb480f44 // cmovsl %ebx, %r13d + JMP LBB2_80 -LBB2_69: +LBB2_72: ADDL $2, AX - MOVB $45, 0(R13)(BX*1) + MOVB $45, 0(R12)(BX*1) MOVL $1, CX SUBL R14, CX MOVL CX, R14 MOVL AX, BX MOVL -44(BP), R9 CMPL R14, $100 - JGE LBB2_66 + JGE LBB2_74 -LBB2_70: +LBB2_75: CMPL R14, $10 - JL LBB2_85 + JL LBB2_77 MOVLQSX R14, AX - LONG $0xe00d8d48; WORD $0x00a0; BYTE $0x00 // leaq $41184(%rip), %rcx /* _Digits(%rip) */ + LONG $0x2e0d8d48; WORD $0x00a4; BYTE $0x00 // leaq $42030(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVL BX, CX - MOVW AX, 0(R13)(CX*1) + MOVW AX, 0(R12)(CX*1) ADDL $2, BX - JMP LBB2_107 + JMP LBB2_105 -LBB2_72: - MOVW $11824, 0(R13) +LBB2_82: + MOVW $11824, 0(R12) TESTL R14, R14 - JS LBB2_86 - XORL R13, R13 - MOVL $2, BX + JS LBB2_84 XORL R12, R12 + MOVL $2, BX + XORL R13, R13 -LBB2_74: +LBB2_80: MOVL BX, SI - ADDQ -64(BP), SI + ADDQ -56(BP), SI MOVL R15, DX - LONG $0x0046dfe8; BYTE $0x00 // callq _print_mantissa - TESTL R13, R13 - JE LBB2_78 - LEAL 0(R13)(BX*1), AX - CMPL R13, R15 - JGE LBB2_80 + LONG $0x0049d9e8; BYTE $0x00 // callq _print_mantissa + TESTL R12, R12 + JE LBB2_81 + LEAL 0(R12)(BX*1), AX + CMPL R12, R15 + JGE LBB2_88 LEAL 0(BX)(R15*1), CX MOVL AX, AX - MOVQ -64(BP), R10 + MOVQ -56(BP), R10 -LBB2_77: +LBB2_90: MOVBLZX -1(R10)(CX*1), DX MOVB DX, 0(R10)(CX*1) - DECQ CX - CMPQ CX, AX - JG LBB2_77 - JMP LBB2_81 + LEAQ -1(CX), DX + MOVQ DX, CX + CMPQ DX, AX + JG LBB2_90 + JMP LBB2_91 -LBB2_78: - MOVQ -64(BP), R10 +LBB2_81: + MOVQ -56(BP), R10 ADDL R15, BX - TESTL R12, R12 - JNE LBB2_82 - JMP LBB2_106 + TESTL R13, R13 + JNE LBB2_94 -LBB2_80: +LBB2_93: + MOVL -44(BP), R9 + JMP LBB2_105 + +LBB2_88: MOVL AX, AX - MOVQ -64(BP), R10 + MOVQ -56(BP), R10 -LBB2_81: +LBB2_91: MOVB $46, 0(R10)(AX*1) ORL $1, BX ADDL R15, BX - TESTL R12, R12 - JE LBB2_106 + TESTL R13, R13 + JE LBB2_93 -LBB2_82: +LBB2_94: MOVLQSX BX, BX - LEAL -1(R12), R8 + LEAL -1(R13), R8 XORL AX, AX CMPL R8, $127 MOVL -44(BP), R9 - JB LBB2_95 - INCQ R8 + JB LBB2_103 + ADDQ $1, R8 MOVQ R8, AX ANDQ $-128, AX - LEAQ -128(AX), DX - MOVQ DX, SI + LEAQ -128(AX), CX + MOVQ CX, SI SHRQ $7, SI - INCQ SI - MOVL SI, CX - ANDL $3, CX - CMPQ DX, $384 - JAE LBB2_89 - XORL SI, SI - JMP LBB2_91 + ADDQ $1, SI + MOVL SI, DX + ANDL $3, DX + CMPQ CX, $384 + JAE LBB2_97 + XORL DI, DI + JMP LBB2_99 -LBB2_85: 
+LBB2_77: ADDB $48, R14 MOVL BX, AX - INCL BX - MOVB R14, 0(R13)(AX*1) - JMP LBB2_107 + ADDL $1, BX + MOVB R14, 0(R12)(AX*1) + JMP LBB2_105 -LBB2_86: +LBB2_84: MOVL $2, BX SUBL R14, BX - LEAQ -2(BX), R8 MOVL $2, AX - CMPQ R8, $128 - JB LBB2_104 - MOVQ R8, AX - ANDQ $-128, AX - LEAQ -128(AX), DX - MOVQ DX, SI - SHRQ $7, SI - INCQ SI - MOVL SI, CX - ANDL $3, CX - CMPQ DX, $384 - JAE LBB2_97 - XORL SI, SI - JMP LBB2_99 - -LBB2_89: - LEAQ 480(BX)(R10*1), DI - MOVQ CX, DX - SUBQ SI, DX - XORL SI, SI - QUAD $0xfffff66d056ffec5 // vmovdqu $-2451(%rip), %ymm0 /* LCPI2_0(%rip) */ - -LBB2_90: - QUAD $0xfffe2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rdi,%rsi) - QUAD $0xfffe4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rdi,%rsi) - QUAD $0xfffe6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rdi,%rsi) - QUAD $0xfffe8037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rdi,%rsi) - QUAD $0xfffea037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rdi,%rsi) - QUAD $0xfffec037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rdi,%rsi) - QUAD $0xfffee037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rdi,%rsi) - QUAD $0xffff0037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rdi,%rsi) - QUAD $0xffff2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rdi,%rsi) - QUAD $0xffff4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rdi,%rsi) - QUAD $0xffff6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rdi,%rsi) - LONG $0x447ffec5; WORD $0x8037 // vmovdqu %ymm0, $-128(%rdi,%rsi) - LONG $0x447ffec5; WORD $0xa037 // vmovdqu %ymm0, $-96(%rdi,%rsi) - LONG $0x447ffec5; WORD $0xc037 // vmovdqu %ymm0, $-64(%rdi,%rsi) - LONG $0x447ffec5; WORD $0xe037 // vmovdqu %ymm0, $-32(%rdi,%rsi) - LONG $0x047ffec5; BYTE $0x37 // vmovdqu %ymm0, (%rdi,%rsi) - ADDQ $512, SI - ADDQ $4, DX - JNE LBB2_90 - -LBB2_91: - TESTQ CX, CX - JE LBB2_94 - ADDQ BX, SI - LEAQ 96(R10)(SI*1), DX - NEGQ CX - QUAD $0xfffff5c4056ffec5 // vmovdqu $-2620(%rip), %ymm0 /* LCPI2_0(%rip) */ - -LBB2_93: - LONG $0x427ffec5; BYTE $0xa0 // vmovdqu %ymm0, $-96(%rdx) - LONG $0x427ffec5; BYTE $0xc0 // vmovdqu %ymm0, $-64(%rdx) - LONG $0x427ffec5; BYTE $0xe0 // vmovdqu %ymm0, $-32(%rdx) - LONG $0x027ffec5 // vmovdqu %ymm0, (%rdx) - SUBQ $-128, DX - INCQ CX - JNE LBB2_93 -LBB2_94: - ADDQ AX, BX - CMPQ R8, AX - JE LBB2_107 - -LBB2_95: - SUBL AX, R12 - -LBB2_96: - MOVB $48, 0(R10)(BX*1) - INCQ BX - DECL R12 - JNE LBB2_96 - JMP LBB2_107 +LBB2_85: + MOVB $48, 0(R12)(AX*1) + ADDQ $1, AX + CMPQ BX, AX + JNE LBB2_85 + ADDQ BX, R12 + MOVQ R12, SI + MOVL R15, DX + LONG $0x0048fde8; BYTE $0x00 // callq _print_mantissa + ADDL BX, R15 + MOVL R15, BX + MOVL -44(BP), R9 + JMP LBB2_105 LBB2_97: - MOVQ CX, DX - SUBQ SI, DX - XORL SI, SI - QUAD $0xfffff577056ffec5 // vmovdqu $-2697(%rip), %ymm0 /* LCPI2_0(%rip) */ + LEAQ 0(BX)(R10*1), CX + ADDQ $480, CX + ANDQ $-4, SI + NEGQ SI + XORL DI, DI + QUAD $0xfffff6c8056ffec5 // vmovdqu $-2360(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_98: - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x02 // vmovdqu %ymm0, $2(%r13,%rsi) - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x22 // vmovdqu %ymm0, $34(%r13,%rsi) - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x42 // vmovdqu %ymm0, $66(%r13,%rsi) - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x62 // vmovdqu %ymm0, $98(%r13,%rsi) - QUAD $0x008235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $130(%r13,%rsi) - QUAD $0x00a235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $162(%r13,%rsi) - QUAD $0x00c235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $194(%r13,%rsi) - QUAD $0x00e235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $226(%r13,%rsi) - QUAD 
$0x010235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $258(%r13,%rsi) - QUAD $0x012235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $290(%r13,%rsi) - QUAD $0x014235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $322(%r13,%rsi) - QUAD $0x016235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $354(%r13,%rsi) - QUAD $0x018235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $386(%r13,%rsi) - QUAD $0x01a235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $418(%r13,%rsi) - QUAD $0x01c235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $450(%r13,%rsi) - QUAD $0x01e235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $482(%r13,%rsi) - ADDQ $512, SI - ADDQ $4, DX + QUAD $0xfffe2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rcx,%rdi) + QUAD $0xfffe4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rcx,%rdi) + QUAD $0xfffe6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rcx,%rdi) + QUAD $0xfffe8039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rcx,%rdi) + QUAD $0xfffea039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rcx,%rdi) + QUAD $0xfffec039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rcx,%rdi) + QUAD $0xfffee039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rcx,%rdi) + QUAD $0xffff0039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rcx,%rdi) + QUAD $0xffff2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rcx,%rdi) + QUAD $0xffff4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rcx,%rdi) + QUAD $0xffff6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rcx,%rdi) + LONG $0x447ffec5; WORD $0x8039 // vmovdqu %ymm0, $-128(%rcx,%rdi) + LONG $0x447ffec5; WORD $0xa039 // vmovdqu %ymm0, $-96(%rcx,%rdi) + LONG $0x447ffec5; WORD $0xc039 // vmovdqu %ymm0, $-64(%rcx,%rdi) + LONG $0x447ffec5; WORD $0xe039 // vmovdqu %ymm0, $-32(%rcx,%rdi) + LONG $0x047ffec5; BYTE $0x39 // vmovdqu %ymm0, (%rcx,%rdi) + ADDQ $512, DI + ADDQ $4, SI JNE LBB2_98 LBB2_99: - TESTQ CX, CX + TESTQ DX, DX JE LBB2_102 - NEGQ CX - QUAD $0xfffff4c2056ffec5 // vmovdqu $-2878(%rip), %ymm0 /* LCPI2_0(%rip) */ + ADDQ BX, DI + LEAQ 0(R10)(DI*1), CX + ADDQ $96, CX + SHLQ $7, DX + XORL SI, SI + QUAD $0xfffff619056ffec5 // vmovdqu $-2535(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_101: - MOVQ SI, DX - ORQ $2, DX - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x00 // vmovdqu %ymm0, (%r13,%rdx) - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x20 // vmovdqu %ymm0, $32(%r13,%rdx) - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x40 // vmovdqu %ymm0, $64(%r13,%rdx) - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x60 // vmovdqu %ymm0, $96(%r13,%rdx) + LONG $0x447ffec5; WORD $0xa031 // vmovdqu %ymm0, $-96(%rcx,%rsi) + LONG $0x447ffec5; WORD $0xc031 // vmovdqu %ymm0, $-64(%rcx,%rsi) + LONG $0x447ffec5; WORD $0xe031 // vmovdqu %ymm0, $-32(%rcx,%rsi) + LONG $0x047ffec5; BYTE $0x31 // vmovdqu %ymm0, (%rcx,%rsi) SUBQ $-128, SI - INCQ CX + CMPQ DX, SI JNE LBB2_101 LBB2_102: + ADDQ AX, BX CMPQ R8, AX JE LBB2_105 - ORQ $2, AX + +LBB2_103: + SUBL AX, R13 LBB2_104: - MOVB $48, 0(R13)(AX*1) - INCQ AX - CMPQ BX, AX + MOVB $48, 0(R10)(BX*1) + ADDQ $1, BX + ADDL $-1, R13 JNE LBB2_104 LBB2_105: - ADDQ BX, R13 - MOVQ R13, SI - MOVL R15, DX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x0043cde8; BYTE $0x00 // callq _print_mantissa - ADDL BX, R15 - MOVL R15, BX - -LBB2_106: - MOVL -44(BP), R9 - -LBB2_107: ADDL R9, BX - JMP LBB2_110 + JMP LBB2_106 -LBB2_108: +LBB2_1: MOVQ $9223372036854775807, CX ANDQ CX, AX - MOVB $45, 0(R13) - INCQ R13 + MOVB $45, 0(R12) + ADDQ $1, R12 MOVL $1, R9 TESTQ AX, AX - JNE LBB2_2 + JNE LBB2_4 -LBB2_109: - MOVB $48, 0(R13) - INCL R9 +LBB2_3: + MOVB $48, 0(R12) + ADDL $1, R9 MOVL R9, BX -LBB2_110: +LBB2_106: MOVL BX, 
AX - ADDQ $40, SP + ADDQ $56, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -1125,10 +1030,10 @@ LBB3_1: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp MOVB $45, 0(DI) - INCQ DI + ADDQ $1, DI NEGQ SI - LONG $0x000044e8; BYTE $0x00 // callq _u64toa - INCL AX + LONG $0x000045e8; BYTE $0x00 // callq _u64toa + ADDL $1, AX BYTE $0x5d // popq %rbp RET @@ -1172,7 +1077,7 @@ _u64toa: ADDQ AX, AX CMPL SI, $1000 JB LBB4_3 - LONG $0xa20d8d48; WORD $0x009c; BYTE $0x00 // leaq $40098(%rip), %rcx /* _Digits(%rip) */ + LONG $0x190d8d48; WORD $0x00a1; BYTE $0x00 // leaq $41241(%rip), %rcx /* _Digits(%rip) */ MOVB 0(DX)(CX*1), CX MOVB CX, 0(DI) MOVL $1, CX @@ -1186,26 +1091,26 @@ LBB4_3: LBB4_4: MOVWLZX DX, DX ORQ $1, DX - LONG $0x81358d48; WORD $0x009c; BYTE $0x00 // leaq $40065(%rip), %rsi /* _Digits(%rip) */ + LONG $0xf8358d48; WORD $0x00a0; BYTE $0x00 // leaq $41208(%rip), %rsi /* _Digits(%rip) */ MOVB 0(DX)(SI*1), DX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB DX, 0(DI)(SI*1) LBB4_6: - LONG $0x70158d48; WORD $0x009c; BYTE $0x00 // leaq $40048(%rip), %rdx /* _Digits(%rip) */ + LONG $0xe6158d48; WORD $0x00a0; BYTE $0x00 // leaq $41190(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), DX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB DX, 0(DI)(SI*1) LBB4_7: MOVWLZX AX, AX ORQ $1, AX - LONG $0x58158d48; WORD $0x009c; BYTE $0x00 // leaq $40024(%rip), %rdx /* _Digits(%rip) */ + LONG $0xcd158d48; WORD $0x00a0; BYTE $0x00 // leaq $41165(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), AX MOVL CX, DX - INCL CX + ADDL $1, CX MOVB AX, 0(DI)(DX*1) MOVL CX, AX BYTE $0x5d // popq %rbp @@ -1249,7 +1154,7 @@ LBB4_8: ADDQ R11, R11 CMPL SI, $10000000 JB LBB4_11 - LONG $0xc1058d48; WORD $0x009b; BYTE $0x00 // leaq $39873(%rip), %rax /* _Digits(%rip) */ + LONG $0x35058d48; WORD $0x00a0; BYTE $0x00 // leaq $41013(%rip), %rax /* _Digits(%rip) */ MOVB 0(R10)(AX*1), AX MOVB AX, 0(DI) MOVL $1, CX @@ -1263,39 +1168,39 @@ LBB4_11: LBB4_12: MOVL R10, AX ORQ $1, AX - LONG $0x9c358d48; WORD $0x009b; BYTE $0x00 // leaq $39836(%rip), %rsi /* _Digits(%rip) */ + LONG $0x10358d48; WORD $0x00a0; BYTE $0x00 // leaq $40976(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB AX, 0(DI)(SI*1) LBB4_14: - LONG $0x8b058d48; WORD $0x009b; BYTE $0x00 // leaq $39819(%rip), %rax /* _Digits(%rip) */ + LONG $0xfe058d48; WORD $0x009f; BYTE $0x00 // leaq $40958(%rip), %rax /* _Digits(%rip) */ MOVB 0(R9)(AX*1), AX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB AX, 0(DI)(SI*1) LBB4_15: MOVWLZX R9, AX ORQ $1, AX - LONG $0x71358d48; WORD $0x009b; BYTE $0x00 // leaq $39793(%rip), %rsi /* _Digits(%rip) */ + LONG $0xe3358d48; WORD $0x009f; BYTE $0x00 // leaq $40931(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, DX - MOVB AX, 0(DX)(DI*1) + MOVB AX, 0(DI)(DX*1) MOVB 0(R8)(SI*1), AX - MOVB AX, 1(DX)(DI*1) + MOVB AX, 1(DI)(DX*1) MOVWLZX R8, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX - MOVB AX, 2(DX)(DI*1) + MOVB AX, 2(DI)(DX*1) MOVB 0(R11)(SI*1), AX - MOVB AX, 3(DX)(DI*1) + MOVB AX, 3(DI)(DX*1) MOVWLZX R11, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX ADDL $5, CX - MOVB AX, 4(DX)(DI*1) + MOVB AX, 4(DI)(DX*1) MOVL CX, AX BYTE $0x5d // popq %rbp RET @@ -1317,7 +1222,7 @@ LBB4_16: LONG $0xe100c269; WORD $0x05f5 // imull $100000000, %edx, %eax SUBL AX, SI LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffdcb0d6ffac5 // vmovdqu $-565(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffdc60d6ffac5 // vmovdqu $-570(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, 
%xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1328,11 +1233,11 @@ LBB4_16: LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 - QUAD $0xfffffdc51512fbc5 // vmovddup $-571(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffffdc01512fbc5 // vmovddup $-576(%rip), %xmm2 /* LCPI4_1(%rip) */ LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 - QUAD $0xfffffdc12512fbc5 // vmovddup $-575(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffffdbc2512fbc5 // vmovddup $-580(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffd8d2d6ffac5 // vmovdqu $-627(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffd882d6ffac5 // vmovdqu $-632(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1351,17 +1256,17 @@ LBB4_16: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffd470dfcf9c5 // vpaddb $-697(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ + QUAD $0xfffffd420dfcf9c5 // vpaddb $-702(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ LONG $0xd2efe9c5 // vpxor %xmm2, %xmm2, %xmm2 LONG $0xc274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + NOTL AX ORL $32768, AX - XORL $-32769, AX BSFL AX, AX MOVL $16, CX SUBL AX, CX SHLQ $4, AX - LONG $0xe6158d48; WORD $0x009a; BYTE $0x00 // leaq $39654(%rip), %rdx /* _VecShiftShuffles(%rip) */ + LONG $0x5b158d48; WORD $0x009f; BYTE $0x00 // leaq $40795(%rip), %rdx /* _VecShiftShuffles(%rip) */ LONG $0x0071e2c4; WORD $0x1004 // vpshufb (%rax,%rdx), %xmm1, %xmm0 LONG $0x077ffac5 // vmovdqu %xmm0, (%rdi) MOVL CX, AX @@ -1384,16 +1289,14 @@ LBB4_18: JMP LBB4_25 LBB4_20: - CMPL DX, $99 - JA LBB4_22 - MOVL DX, AX - LONG $0xc90d8d48; WORD $0x0099; BYTE $0x00 // leaq $39369(%rip), %rcx /* _Digits(%rip) */ - MOVB 0(CX)(AX*2), DX - MOVB 1(CX)(AX*2), AX - MOVB DX, 0(DI) - MOVB AX, 1(DI) - MOVL $2, CX - JMP LBB4_25 + CMPL DX, $99 + JA LBB4_22 + MOVL DX, AX + LONG $0x3e0d8d48; WORD $0x009e; BYTE $0x00 // leaq $40510(%rip), %rcx /* _Digits(%rip) */ + MOVWLZX 0(CX)(AX*2), AX + MOVW AX, 0(DI) + MOVL $2, CX + JMP LBB4_25 LBB4_22: MOVL DX, AX @@ -1412,11 +1315,9 @@ LBB4_22: WORD $0xc96b; BYTE $0x64 // imull $100, %ecx, %ecx SUBL CX, AX MOVWLZX AX, AX - LONG $0x780d8d48; WORD $0x0099; BYTE $0x00 // leaq $39288(%rip), %rcx /* _Digits(%rip) */ - MOVB 0(CX)(AX*2), DX - MOVB 1(CX)(AX*2), AX - MOVB DX, 1(DI) - MOVB AX, 2(DI) + LONG $0xf50d8d48; WORD $0x009d; BYTE $0x00 // leaq $40437(%rip), %rcx /* _Digits(%rip) */ + MOVWLZX 0(CX)(AX*2), AX + MOVW AX, 1(DI) MOVL $3, CX JMP LBB4_25 @@ -1424,18 +1325,15 @@ LBB4_24: WORD $0xc86b; BYTE $0x64 // imull $100, %eax, %ecx SUBL CX, DX MOVWLZX AX, AX - LONG $0x55058d4c; WORD $0x0099; BYTE $0x00 // leaq $39253(%rip), %r8 /* _Digits(%rip) */ - MOVB 0(R8)(AX*2), CX - MOVB 1(R8)(AX*2), AX - MOVB CX, 0(DI) - MOVB AX, 1(DI) + LONG $0xd70d8d48; WORD $0x009d; BYTE $0x00 // leaq $40407(%rip), %rcx /* _Digits(%rip) */ + MOVWLZX 0(CX)(AX*2), AX + MOVW AX, 0(DI) MOVWLZX DX, AX - MOVB 0(R8)(AX*2), CX + MOVB 0(CX)(AX*2), DX ADDQ AX, AX - MOVB CX, 2(DI) - ORL $1, AX + MOVB DX, 2(DI) MOVWLZX AX, AX - MOVB 0(AX)(R8*1), AX + MOVB 1(AX)(CX*1), AX MOVB AX, 3(DI) MOVL $4, CX @@ -1445,7 +1343,7 @@ LBB4_25: MULQ DX SHRQ $26, DX LONG 
$0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffbe60d6ffac5 // vmovdqu $-1050(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffbfc0d6ffac5 // vmovdqu $-1028(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1456,11 +1354,11 @@ LBB4_25: LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 - QUAD $0xfffffbe01512fbc5 // vmovddup $-1056(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffffbf61512fbc5 // vmovddup $-1034(%rip), %xmm2 /* LCPI4_1(%rip) */ LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 - QUAD $0xfffffbdc2512fbc5 // vmovddup $-1060(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffffbf22512fbc5 // vmovddup $-1038(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffba82d6ffac5 // vmovdqu $-1112(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffbbe2d6ffac5 // vmovdqu $-1090(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1481,7 +1379,7 @@ LBB4_25: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffb5a05fcf9c5 // vpaddb $-1190(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ + QUAD $0xfffffb7005fcf9c5 // vpaddb $-1168(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ MOVL CX, AX LONG $0x047ffac5; BYTE $0x07 // vmovdqu %xmm0, (%rdi,%rax) ORL $16, CX @@ -1507,327 +1405,503 @@ _quote: WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx SUBQ $32, SP - MOVQ DX, R10 + MOVQ CX, R11 + MOVQ DX, R15 + MOVQ 0(CX), R10 TESTB $1, R8 - LONG $0x57058d48; WORD $0x0099; BYTE $0x00 // leaq $39255(%rip), %rax /* __SingleQuoteTab(%rip) */ - LONG $0x50158d48; WORD $0x00a9; BYTE $0x00 // leaq $43344(%rip), %rdx /* __DoubleQuoteTab(%rip) */ - LONG $0xd0440f48 // cmoveq %rax, %rdx - MOVQ R10, R8 - MOVQ DI, AX - TESTQ SI, SI - JE LBB5_57 - MOVQ 0(CX), R14 - QUAD $0xffffff8c056ffac5 // vmovdqu $-116(%rip), %xmm0 /* LCPI5_0(%rip) */ - QUAD $0xffffff940d6ffac5 // vmovdqu $-108(%rip), %xmm1 /* LCPI5_1(%rip) */ - QUAD $0xffffff9c156ffac5 // vmovdqu $-100(%rip), %xmm2 /* LCPI5_2(%rip) */ + MOVQ SI, AX + LONG $0xdb0d8d48; WORD $0x009d; BYTE $0x00 // leaq $40411(%rip), %rcx /* __SingleQuoteTab(%rip) */ + LONG $0xd4258d4c; WORD $0x00ad; BYTE $0x00 // leaq $44500(%rip), %r12 /* __DoubleQuoteTab(%rip) */ + LONG $0xe1440f4c // cmoveq %rcx, %r12 + LEAQ 0(SI*8), CX + CMPQ R10, CX + JGE LBB5_56 + MOVQ R15, R14 + MOVQ DI, R9 + TESTQ AX, AX + JE LBB5_80 + MOVQ R11, -56(BP) + QUAD $0xffffff71056ffac5 // vmovdqu $-143(%rip), %xmm0 /* LCPI5_0(%rip) */ + QUAD $0xffffff790d6ffac5 // vmovdqu $-135(%rip), %xmm1 /* LCPI5_1(%rip) */ + QUAD $0xffffff81156ffac5 // vmovdqu $-127(%rip), %xmm2 /* LCPI5_2(%rip) */ LONG $0xdb76e1c5 // vpcmpeqd %xmm3, %xmm3, %xmm3 - MOVQ DI, AX - MOVQ R10, R8 - MOVQ R10, -56(BP) - MOVQ CX, -48(BP) - -LBB5_2: - CMPQ SI, $15 - SETGT R9 - MOVQ R14, R11 - MOVQ R8, R12 - MOVQ SI, R10 - MOVQ AX, R15 - CMPQ R14, $16 - JL LBB5_9 - CMPQ SI, $16 - JL LBB5_9 - MOVQ AX, R15 - MOVQ SI, R10 - MOVQ R8, R12 - MOVQ R14, R13 + MOVQ DI, CX + MOVQ R15, -48(BP) + MOVQ R15, R14 + MOVQ R12, -64(BP) + +LBB5_3: + MOVQ CX, R9 + CMPQ AX, $16 + SETGE BX + MOVQ R10, R12 + MOVQ R14, R15 + MOVQ AX, R11 + MOVQ CX, R13 + JL LBB5_10 + CMPQ R10, $16 
+ JL LBB5_10 + XORL R15, R15 + MOVQ AX, CX + MOVQ R10, DX -LBB5_5: - LONG $0x6f7ac1c4; BYTE $0x27 // vmovdqu (%r15), %xmm4 +LBB5_6: + LONG $0x6f7a81c4; WORD $0x3924 // vmovdqu (%r9,%r15), %xmm4 LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xfa74d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm7 LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 - LONG $0x7f7ac1c4; WORD $0x2424 // vmovdqu %xmm4, (%r12) + LONG $0x7f7a81c4; WORD $0x3e24 // vmovdqu %xmm4, (%r14,%r15) LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 - LONG $0xe5dbd9c5 // vpand %xmm5, %xmm4, %xmm4 + LONG $0xe4dbd1c5 // vpand %xmm4, %xmm5, %xmm4 LONG $0xe4ebc9c5 // vpor %xmm4, %xmm6, %xmm4 - LONG $0xccd7f9c5 // vpmovmskb %xmm4, %ecx - TESTW CX, CX - JNE LBB5_6 + LONG $0xf4d7f9c5 // vpmovmskb %xmm4, %esi + TESTL SI, SI + JNE LBB5_23 + LEAQ -16(CX), R11 + LEAQ -16(DX), R12 ADDQ $16, R15 - ADDQ $16, R12 - LEAQ -16(R13), R11 - CMPQ R10, $31 - SETGT R9 - CMPQ R10, $32 - LEAQ -16(R10), R10 + CMPQ CX, $32 + SETGE BX JL LBB5_9 - CMPQ R13, $31 - MOVQ R11, R13 - JG LBB5_5 + MOVQ R11, CX + CMPQ DX, $31 + MOVQ R12, DX + JG LBB5_6 LBB5_9: - TESTB R9, R9 - JE LBB5_10 - LONG $0x6f7ac1c4; BYTE $0x27 // vmovdqu (%r15), %xmm4 - LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xfa74d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm7 - LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 - LONG $0xfb64d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm7 - LONG $0xeddbc1c5 // vpand %xmm5, %xmm7, %xmm5 - LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 - LONG $0xcdd7f9c5 // vpmovmskb %xmm5, %ecx - ORL $65536, CX - BSFL CX, R9 - LONG $0x7ef9e1c4; BYTE $0xe1 // vmovq %xmm4, %rcx - CMPQ R11, R9 - JGE LBB5_13 - CMPQ R11, $8 - MOVQ SI, -72(BP) - JB LBB5_25 - MOVQ CX, 0(R12) - LEAQ 8(R15), R9 - ADDQ $8, R12 - LEAQ -8(R11), BX - JMP LBB5_27 + LEAQ 0(R9)(R15*1), R13 + ADDQ R14, R15 LBB5_10: + TESTB BX, BX + JE LBB5_14 + LONG $0x6f7ac1c4; WORD $0x0065 // vmovdqu (%r13), %xmm4 + LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xfa74d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm7 + LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 + LONG $0xfb64d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm7 + LONG $0xefdbd1c5 // vpand %xmm7, %xmm5, %xmm5 + LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 + LONG $0xcdd7f9c5 // vpmovmskb %xmm5, %ecx + ORL $65536, CX + BSFL CX, R11 + LONG $0x7ef9e1c4; BYTE $0xe1 // vmovq %xmm4, %rcx + CMPQ R12, R11 + JGE LBB5_24 + CMPQ R12, $8 + JB LBB5_28 + MOVQ CX, 0(R15) + LEAQ 8(R13), R11 + ADDQ $8, R15 + LEAQ -8(R12), BX + CMPQ BX, $4 + JAE LBB5_29 + JMP LBB5_30 + +LBB5_14: TESTQ R11, R11 - MOVQ DI, -64(BP) - JLE LBB5_11 - TESTQ R10, R10 - MOVQ -48(BP), CX - MOVQ -56(BP), DI - LONG $0x052d8d4c; WORD $0x0098; BYTE $0x00 // leaq $38917(%rip), %r13 /* __SingleQuoteTab(%rip) */ - JLE LBB5_39 + JLE LBB5_21 + TESTQ R12, R12 + JLE LBB5_21 + MOVQ R9, -72(BP) + XORL R9, R9 + XORL CX, CX -LBB5_35: - MOVBLZX 0(R15), R9 - MOVQ R9, BX +LBB5_17: + MOVBLZX 0(R13)(R9*1), SI + MOVQ SI, BX SHLQ $4, BX - CMPQ 0(BX)(R13*1), $0 - JNE LBB5_36 - INCQ R15 - MOVB R9, 0(R12) - CMPQ R10, $2 - LEAQ -1(R10), R10 - JL LBB5_39 - INCQ R12 - CMPQ R11, $1 - LEAQ -1(R11), R11 - JG LBB5_35 - JMP LBB5_39 + LONG $0x6d158d48; WORD $0x009c; BYTE $0x00 // leaq $40045(%rip), %rdx /* __SingleQuoteTab(%rip) */ + CMPQ 0(BX)(DX*1), $0 + JNE LBB5_27 + LEAQ 0(R11)(CX*1), R8 + MOVB SI, 0(R15)(R9*1) + LEAQ -1(CX), BX + CMPQ R8, $2 + JL LBB5_20 + ADDQ R12, CX + ADDQ $1, R9 + CMPQ CX, $1 + MOVQ BX, CX + JG 
LBB5_17 + +LBB5_20: + SUBQ BX, R13 + ADDQ BX, R11 + MOVQ -72(BP), R9 -LBB5_6: - MOVWLZX CX, CX - SUBQ AX, R15 - BSFL CX, R11 - ADDQ R15, R11 - JMP LBB5_23 - -LBB5_13: - CMPL R9, $8 - JB LBB5_14 - MOVQ CX, 0(R12) - LEAQ 8(R15), R13 - ADDQ $8, R12 - LEAQ -8(R9), BX - CMPQ BX, $4 - JAE LBB5_17 - JMP LBB5_18 +LBB5_21: + TESTQ R11, R11 + MOVQ -64(BP), R12 + JE LBB5_26 + NOTQ R13 + ADDQ R9, R13 + JMP LBB5_41 -LBB5_36: - SUBQ AX, R15 - MOVQ R15, R11 - JMP LBB5_40 +LBB5_23: + BSFW SI, CX + MOVWLZX CX, R13 + ADDQ R15, R13 + JMP LBB5_40 + +LBB5_24: + CMPL R11, $8 + JB LBB5_34 + MOVQ CX, 0(R15) + LEAQ 8(R13), R12 + ADDQ $8, R15 + LEAQ -8(R11), BX + CMPQ BX, $4 + JAE LBB5_35 + JMP LBB5_36 -LBB5_25: - MOVQ R15, R9 - MOVQ R11, BX +LBB5_26: + SUBQ R9, R13 + JMP LBB5_41 LBB5_27: - MOVQ -48(BP), CX - MOVQ -56(BP), R10 - MOVQ $12884901889, R13 + MOVQ -72(BP), R9 + SUBQ R9, R13 + SUBQ CX, R13 + JMP LBB5_40 + +LBB5_28: + MOVQ R13, R11 + MOVQ R12, BX CMPQ BX, $4 - JAE LBB5_28 - CMPQ BX, $2 - JAE LBB5_30 + JB LBB5_30 + +LBB5_29: + MOVL 0(R11), CX + MOVL CX, 0(R15) + ADDQ $4, R11 + ADDQ $4, R15 + ADDQ $-4, BX + +LBB5_30: + CMPQ BX, $2 + JB LBB5_31 + MOVWLZX 0(R11), CX + MOVW CX, 0(R15) + ADDQ $2, R11 + ADDQ $2, R15 + ADDQ $-2, BX + TESTQ BX, BX + JNE LBB5_32 + JMP LBB5_33 LBB5_31: TESTQ BX, BX JE LBB5_33 LBB5_32: - MOVB 0(R9), BX - MOVB BX, 0(R12) + MOVB 0(R11), CX + MOVB CX, 0(R15) LBB5_33: - SUBQ AX, R11 - ADDQ R15, R11 - NOTQ R11 - MOVQ -72(BP), SI - JMP LBB5_41 - -LBB5_11: - MOVQ -48(BP), CX - MOVQ -56(BP), DI - -LBB5_39: - SUBQ AX, R15 - NEGQ R10 - SBBQ R11, R11 - XORQ R15, R11 - -LBB5_40: - MOVQ $12884901889, R13 - MOVQ DI, R10 - MOVQ -64(BP), DI - JMP LBB5_41 + ADDQ R13, R12 + NOTQ R12 + ADDQ R9, R12 + MOVQ R12, R13 + JMP LBB5_40 -LBB5_14: - MOVQ R15, R13 - MOVQ R9, BX +LBB5_34: + MOVQ R13, R12 + MOVQ R11, BX CMPQ BX, $4 - JB LBB5_18 + JB LBB5_36 -LBB5_17: - MOVL 0(R13), CX - MOVL CX, 0(R12) - ADDQ $4, R13 +LBB5_35: + MOVL 0(R12), CX + MOVL CX, 0(R15) ADDQ $4, R12 + ADDQ $4, R15 ADDQ $-4, BX -LBB5_18: - CMPQ BX, $2 - JAE LBB5_19 +LBB5_36: + CMPQ BX, $2 + JB LBB5_37 + MOVWLZX 0(R12), CX + MOVW CX, 0(R15) + ADDQ $2, R12 + ADDQ $2, R15 + ADDQ $-2, BX + TESTQ BX, BX + JNE LBB5_38 + JMP LBB5_39 + +LBB5_37: TESTQ BX, BX - JE LBB5_22 + JE LBB5_39 -LBB5_21: - MOVB 0(R13), CX - MOVB CX, 0(R12) +LBB5_38: + MOVB 0(R12), CX + MOVB CX, 0(R15) -LBB5_22: - SUBQ AX, R15 - ADDQ R9, R15 - MOVQ R15, R11 +LBB5_39: + SUBQ R9, R13 + ADDQ R11, R13 -LBB5_23: - MOVQ -48(BP), CX - MOVQ -56(BP), R10 - MOVQ $12884901889, R13 +LBB5_40: + MOVQ -64(BP), R12 LBB5_41: - TESTQ R11, R11 - JS LBB5_42 - ADDQ R11, AX - ADDQ R11, R8 - CMPQ SI, R11 - JE LBB5_57 - SUBQ R11, R14 - SUBQ SI, R11 + MOVQ $12884901889, R11 + TESTQ R13, R13 + JS LBB5_83 + ADDQ R13, R14 + CMPQ AX, R13 + JE LBB5_79 + SUBQ R13, R10 JMP LBB5_45 -LBB5_55: - INCQ AX - ADDQ R12, R8 - INCQ R11 - JE LBB5_57 +LBB5_44: + ADDQ SI, R14 + ADDQ $1, R13 + CMPQ AX, R13 + JE LBB5_79 LBB5_45: - MOVBLZX 0(AX), BX - SHLQ $4, BX - MOVQ 0(DX)(BX*1), SI - TESTL SI, SI - JE LBB5_56 - MOVLQSX SI, R12 - SUBQ R12, R14 - JL LBB5_47 - SHLQ $32, SI - LEAQ 8(DX)(BX*1), R15 - CMPQ SI, R13 + MOVBLZX 0(R9)(R13*1), CX + SHLQ $4, CX + MOVQ 0(R12)(CX*1), DX + TESTL DX, DX + JE LBB5_53 + MOVLQSX DX, SI + SUBQ SI, R10 + JL LBB5_81 + SHLQ $32, DX + LEAQ 0(R12)(CX*1), BX + ADDQ $8, BX + CMPQ DX, R11 JL LBB5_49 - MOVL 0(R15), SI - MOVL SI, 0(R8) - LEAQ 12(DX)(BX*1), R15 - LEAQ 4(R8), R9 - LEAQ -4(R12), BX - CMPQ BX, $2 - JGE LBB5_52 - JMP LBB5_53 + MOVL 0(BX), DX + MOVL DX, 0(R14) + LEAQ 0(R12)(CX*1), BX + ADDQ 
$12, BX + LEAQ 4(R14), R8 + LEAQ -4(SI), CX + CMPQ CX, $2 + JGE LBB5_50 + JMP LBB5_51 LBB5_49: - MOVQ R8, R9 - MOVQ R12, BX - CMPQ BX, $2 - JL LBB5_53 + MOVQ R14, R8 + MOVQ SI, CX + CMPQ CX, $2 + JL LBB5_51 -LBB5_52: - MOVWLZX 0(R15), SI - MOVW SI, 0(R9) - ADDQ $2, R15 - ADDQ $2, R9 - ADDQ $-2, BX +LBB5_50: + MOVWLZX 0(BX), DX + MOVW DX, 0(R8) + ADDQ $2, BX + ADDQ $2, R8 + ADDQ $-2, CX + +LBB5_51: + TESTQ CX, CX + JLE LBB5_44 + MOVBLZX 0(BX), CX + MOVB CX, 0(R8) + JMP LBB5_44 LBB5_53: - TESTQ BX, BX - JLE LBB5_55 - MOVBLZX 0(R15), BX - MOVB BX, 0(R9) - JMP LBB5_55 + LEAQ 0(R9)(R13*1), CX + SUBQ R13, AX + JNE LBB5_3 + +LBB5_79: + ADDQ R13, R9 + MOVQ -56(BP), R11 + MOVQ -48(BP), R15 + +LBB5_80: + SUBQ R15, R14 + MOVQ R14, 0(R11) + SUBQ DI, R9 + MOVQ R9, AX + JMP LBB5_82 LBB5_56: - MOVQ R11, SI - NEGQ SI - TESTQ R11, R11 - JNE LBB5_2 - JMP LBB5_57 + LONG $0x460d8d4c; WORD $0x00ba; BYTE $0x00 // leaq $47686(%rip), %r9 /* __EscTab(%rip) */ + QUAD $0xfffffc0b056ffac5 // vmovdqu $-1013(%rip), %xmm0 /* LCPI5_0(%rip) */ + QUAD $0xfffffc130d6ffac5 // vmovdqu $-1005(%rip), %xmm1 /* LCPI5_1(%rip) */ + QUAD $0xfffffc1b156ffac5 // vmovdqu $-997(%rip), %xmm2 /* LCPI5_2(%rip) */ + LONG $0xdb76e1c5 // vpcmpeqd %xmm3, %xmm3, %xmm3 + MOVQ R15, BX + MOVQ AX, SI -LBB5_28: - MOVL 0(R9), SI - MOVL SI, 0(R12) - ADDQ $4, R9 - ADDQ $4, R12 - ADDQ $-4, BX - CMPQ BX, $2 - JB LBB5_31 +LBB5_57: + CMPQ SI, $16 + JL LBB5_62 + MOVQ SI, R10 + NEGQ R10 + ADDQ $16, SI + +LBB5_59: + LONG $0x276ffac5 // vmovdqu (%rdi), %xmm4 + LONG $0xec64f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xfa74d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm7 + LONG $0xf6ebc1c5 // vpor %xmm6, %xmm7, %xmm6 + LONG $0x237ffac5 // vmovdqu %xmm4, (%rbx) + LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 + LONG $0xe4dbd1c5 // vpand %xmm4, %xmm5, %xmm4 + LONG $0xe4ebc9c5 // vpor %xmm4, %xmm6, %xmm4 + LONG $0xccd7f9c5 // vpmovmskb %xmm4, %ecx + TESTL CX, CX + JNE LBB5_72 + ADDQ $16, DI + ADDQ $16, BX + ADDQ $16, R10 + ADDQ $-16, SI + CMPQ SI, $31 + JG LBB5_59 + NEGQ R10 + MOVQ R10, SI -LBB5_30: - MOVWLZX 0(R9), SI - MOVW SI, 0(R12) - ADDQ $2, R9 - ADDQ $2, R12 - ADDQ $-2, BX - TESTQ BX, BX - JNE LBB5_32 - JMP LBB5_33 +LBB5_62: + CMPQ SI, $8 + JL LBB5_66 + MOVBLZX 0(DI), CX + MOVBLZX 1(DI), DX + MOVB 0(DX)(R9*1), DX + ADDB DX, DX + ORB 0(CX)(R9*1), DX + MOVBLZX 2(DI), CX + MOVB 0(CX)(R9*1), CX + SHLB $2, CX + ORB DX, CX + MOVBLZX 3(DI), DX + MOVB 0(DX)(R9*1), DX + SHLB $3, DX + ORB CX, DX + MOVQ 0(DI), CX + MOVQ CX, 0(BX) + JNE LBB5_76 + MOVBLZX 4(DI), CX + MOVBLZX 5(DI), DX + MOVB 0(DX)(R9*1), DX + ADDB DX, DX + ORB 0(CX)(R9*1), DX + MOVBLZX 6(DI), CX + MOVB 0(CX)(R9*1), CX + SHLB $2, CX + ORB DX, CX + MOVBLZX 7(DI), DX + MOVB 0(DX)(R9*1), DX + SHLB $3, DX + ORB CX, DX + JNE LBB5_77 + ADDQ $8, BX + ADDQ $8, DI + ADDQ $-8, SI + +LBB5_66: + CMPQ SI, $4 + JL LBB5_69 + MOVBLZX 0(DI), CX + MOVBLZX 1(DI), DX + MOVB 0(DX)(R9*1), DX + ADDB DX, DX + ORB 0(CX)(R9*1), DX + MOVBLZX 2(DI), CX + MOVB 0(CX)(R9*1), CX + SHLB $2, CX + ORB DX, CX + MOVBLZX 3(DI), DX + MOVB 0(DX)(R9*1), DX + SHLB $3, DX + ORB CX, DX + MOVL 0(DI), CX + MOVL CX, 0(BX) + JNE LBB5_76 + ADDQ $4, BX + ADDQ $4, DI + ADDQ $-4, SI + +LBB5_69: + TESTQ SI, SI + JLE LBB5_78 + +LBB5_70: + MOVBLZX 0(DI), CX + CMPB 0(CX)(R9*1), $0 + JNE LBB5_73 + ADDQ $1, DI + MOVB CX, 0(BX) + ADDQ $1, BX + LEAQ -1(SI), CX + CMPQ SI, $1 + MOVQ CX, SI + JG LBB5_70 + JMP LBB5_78 -LBB5_19: - MOVWLZX 0(R13), CX - MOVW CX, 0(R12) - ADDQ $2, R13 - ADDQ $2, R12 - ADDQ $-2, BX - TESTQ BX, BX - 
JNE LBB5_21 - JMP LBB5_22 +LBB5_72: + BSFW CX, CX + MOVWLZX CX, CX + ADDQ CX, DI + ADDQ CX, R10 + NEGQ R10 + ADDQ CX, BX + MOVQ R10, SI -LBB5_57: - SUBQ R10, R8 - MOVQ R8, 0(CX) - SUBQ DI, AX - JMP LBB5_58 +LBB5_73: + MOVB 0(DI), CX -LBB5_47: - SUBQ R10, R8 - MOVQ R8, 0(CX) - NOTQ AX - ADDQ DI, AX +LBB5_74: + MOVQ BX, DX + MOVBLZX CX, CX + SHLQ $4, CX + MOVLQSX 0(R12)(CX*1), BX + MOVQ 8(R12)(CX*1), CX + MOVQ CX, 0(DX) + ADDQ DX, BX + CMPQ SI, $2 + JL LBB5_78 + MOVBLZX 1(DI), CX + ADDQ $1, DI + ADDQ $-1, SI + CMPB 0(CX)(R9*1), $0 + JNE LBB5_74 + JMP LBB5_57 + +LBB5_76: + MOVBLZX DX, CX + BSFL CX, CX + ADDQ CX, DI + SUBQ CX, SI + ADDQ CX, BX + JMP LBB5_73 -LBB5_58: +LBB5_77: + MOVBLZX DX, CX + BSFL CX, CX + LEAQ 4(CX), DX + ADDQ CX, DI + ADDQ $4, DI + SUBQ DX, SI + ADDQ CX, BX + ADDQ $4, BX + JMP LBB5_73 + +LBB5_78: + SUBQ R15, BX + MOVQ BX, 0(R11) + JMP LBB5_82 + +LBB5_81: + SUBQ -48(BP), R14 + MOVQ -56(BP), AX + MOVQ R14, 0(AX) + SUBQ R9, DI + NOTQ R13 + ADDQ DI, R13 + MOVQ R13, AX + +LBB5_82: ADDQ $32, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 @@ -1837,483 +1911,661 @@ LBB5_58: BYTE $0x5d // popq %rbp RET -LBB5_42: - SUBQ R10, R8 - NOTQ R11 - ADDQ R11, R8 - MOVQ R8, 0(CX) - SUBQ DI, AX - ADDQ R11, AX - NOTQ AX - JMP LBB5_58 +LBB5_83: + MOVQ -48(BP), CX + ADDQ R13, CX + NOTQ CX + ADDQ R14, CX + MOVQ -56(BP), AX + MOVQ CX, 0(AX) + SUBQ R9, DI + ADDQ R13, DI + MOVQ DI, AX + JMP LBB5_82 LCPI6_0: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' _unquote: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - WORD $0x5741 // pushq %r15 - WORD $0x5641 // pushq %r14 - WORD $0x5541 // pushq %r13 - WORD $0x5441 // pushq %r12 - BYTE $0x53 // pushq %rbx - SUBQ $40, SP + BYTE $0x55 // pushq %rbp + WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5741 // pushq %r15 + WORD $0x5641 // pushq %r14 + WORD $0x5541 // pushq %r13 + WORD $0x5441 // pushq %r12 + BYTE $0x53 // pushq %rbx + SUBQ $24, SP TESTQ SI, SI - JE LBB6_82 - MOVQ SI, R11 - MOVQ CX, -56(BP) - MOVQ R8, AX - MOVQ R8, -72(BP) - MOVL R8, R10 - ANDL $1, R10 - LONG $0x26058d4c; WORD $0x00b5; BYTE $0x00 // leaq $46374(%rip), %r8 /* __UnquoteTab(%rip) */ - QUAD $0xffffffb2056ffac5 // vmovdqu $-78(%rip), %xmm0 /* LCPI6_0(%rip) */ - MOVQ DI, R9 - MOVQ SI, R13 + JE LBB6_118 + MOVL R8, R9 + ANDL $1, R9 + QUAD $0xffffffc7056ffac5 // vmovdqu $-57(%rip), %xmm0 /* LCPI6_0(%rip) */ + MOVQ DI, R11 + MOVQ SI, R15 MOVQ DX, AX LBB6_2: - CMPB 0(R9), $92 + CMPB 0(R11), $92 JNE LBB6_4 - XORL SI, SI - JMP LBB6_13 + XORL R14, R14 + JMP LBB6_15 LBB6_4: - MOVQ R13, R15 - MOVQ AX, SI - MOVQ R9, R14 - CMPQ R13, $16 - JL LBB6_7 - -LBB6_5: - LONG $0x6f7ac1c4; BYTE $0x0e // vmovdqu (%r14), %xmm1 - LONG $0x0e7ffac5 // vmovdqu %xmm1, (%rsi) - LONG $0xc874f1c5 // vpcmpeqb %xmm0, %xmm1, %xmm1 - LONG $0xd9d7f9c5 // vpmovmskb %xmm1, %ebx - TESTW BX, BX - JNE LBB6_12 - ADDQ $16, R14 - ADDQ $16, SI - CMPQ R15, $31 - LEAQ -16(R15), R15 - JG LBB6_5 - -LBB6_7: - TESTQ R15, R15 - JE LBB6_83 - XORL BX, BX + MOVQ R15, R12 + MOVQ AX, R13 + MOVQ R11, R14 + CMPQ R15, $16 + JL LBB6_9 + XORL R13, R13 + MOVQ R15, R14 + +LBB6_6: + LONG $0x6f7a81c4; WORD $0x2b0c // vmovdqu (%r11,%r13), %xmm1 + LONG $0x7f7aa1c4; WORD $0x280c // vmovdqu %xmm1, (%rax,%r13) + LONG $0xc874f1c5 // vpcmpeqb %xmm0, %xmm1, %xmm1 + LONG $0xd9d7f9c5 // vpmovmskb %xmm1, %ebx + TESTL BX, BX + JNE LBB6_14 + LEAQ -16(R14), R12 + ADDQ $16, R13 + CMPQ R14, $31 + MOVQ R12, R14 + JG LBB6_6 + LEAQ 0(R11)(R13*1), R14 + ADDQ AX, R13 LBB6_9: - 
MOVBLZX 0(R14)(BX*1), CX - CMPB CX, $92 - JE LBB6_11 - MOVB CX, 0(SI)(BX*1) - INCQ BX - CMPQ R15, BX - JNE LBB6_9 - JMP LBB6_83 + TESTQ R12, R12 + JE LBB6_119 + XORL BX, BX LBB6_11: - ADDQ BX, R14 - SUBQ R9, R14 - MOVQ R14, SI - CMPQ SI, $-1 - JNE LBB6_13 - JMP LBB6_83 - -LBB6_12: - MOVWLZX BX, CX - SUBQ R9, R14 - BSFQ CX, SI - ADDQ R14, SI - CMPQ SI, $-1 - JE LBB6_83 + MOVBLZX 0(R14)(BX*1), R10 + CMPB R10, $92 + JE LBB6_13 + MOVB R10, 0(R13)(BX*1) + ADDQ $1, BX + CMPQ R12, BX + JNE LBB6_11 + JMP LBB6_119 LBB6_13: - LEAQ 2(SI), CX - SUBQ CX, R13 - JS LBB6_94 - LEAQ 2(R9)(SI*1), R9 - TESTQ R10, R10 - JNE LBB6_58 + SUBQ R11, R14 + ADDQ BX, R14 + CMPQ R14, $-1 + JNE LBB6_15 + JMP LBB6_119 + +LBB6_14: + BSFW BX, BX + MOVWLZX BX, R14 + ADDQ R13, R14 + CMPQ R14, $-1 + JE LBB6_119 LBB6_15: - ADDQ SI, AX - MOVBLZX -1(R9), CX - MOVB 0(CX)(R8*1), CX - CMPB CX, $-1 - JE LBB6_18 - TESTB CX, CX - JE LBB6_95 - MOVB CX, 0(AX) - INCQ AX - JMP LBB6_57 - -LBB6_18: - CMPQ R13, $3 - JLE LBB6_94 - MOVL 0(R9), SI - MOVL SI, CX - NOTL CX - LEAL -808464432(SI), BX - ANDL $-2139062144, CX - TESTL BX, CX - JNE LBB6_85 - LEAL 421075225(SI), BX - ORL SI, BX + LEAQ 2(R14), BX + SUBQ BX, R15 + JS LBB6_155 + ADDQ R14, R11 + ADDQ $2, R11 + TESTQ R9, R9 + JNE LBB6_42 + +LBB6_17: + ADDQ R14, AX + MOVBLZX -1(R11), BX + LONG $0x00158d4c; WORD $0x00b8; BYTE $0x00 // leaq $47104(%rip), %r10 /* __UnquoteTab(%rip) */ + MOVB 0(BX)(R10*1), BX + CMPB BX, $-1 + JE LBB6_20 + TESTB BX, BX + JE LBB6_134 + MOVB BX, 0(AX) + ADDQ $1, AX + JMP LBB6_72 + +LBB6_20: + CMPQ R15, $3 + JLE LBB6_155 + MOVL 0(R11), R14 + MOVL R14, R12 + NOTL R12 + LEAL -808464432(R14), BX + ANDL $-2139062144, R12 + TESTL BX, R12 + JNE LBB6_121 + LEAL 421075225(R14), BX + ORL R14, BX TESTL $-2139062144, BX - JNE LBB6_85 - MOVL SI, BX + JNE LBB6_121 + MOVL R14, BX ANDL $2139062143, BX - MOVL $-1061109568, R14 - SUBL BX, R14 - LEAL 1179010630(BX), R15 - ANDL CX, R14 - TESTL R15, R14 - JNE LBB6_85 - MOVL $-522133280, R14 - SUBL BX, R14 + MOVL $-1061109568, R10 + SUBL BX, R10 + LEAL 1179010630(BX), R13 + ANDL R12, R10 + TESTL R13, R10 + JNE LBB6_121 + MOVL $-522133280, R10 + SUBL BX, R10 ADDL $960051513, BX - ANDL R14, CX - TESTL BX, CX - JNE LBB6_85 - BSWAPL SI - MOVL SI, CX - SHRL $4, CX - NOTL CX - ANDL $16843009, CX - LEAL 0(CX)(CX*8), CX - ANDL $252645135, SI - ADDL CX, SI - MOVL SI, CX - SHRL $4, CX - ORL SI, CX - MOVBLZX CX, R15 - SHRL $8, CX - ANDL $65280, CX - ORL CX, R15 - LEAQ -4(R13), R14 - CMPL R15, $128 - JB LBB6_66 + ANDL R10, R12 + TESTL BX, R12 + JNE LBB6_121 + BSWAPL R14 + MOVL R14, BX + SHRL $4, BX + NOTL BX + ANDL $16843009, BX + LEAL 0(BX)(BX*8), BX + ANDL $252645135, R14 + ADDL BX, R14 + MOVL R14, R13 + SHRL $4, R13 + ORL R14, R13 + MOVL R13, BX + SHRL $8, BX + ANDL $65280, BX + MOVBLZX R13, R14 + ORL BX, R14 + LEAQ 4(R11), R12 + LEAQ -4(R15), R10 + CMPL R14, $128 + JB LBB6_50 + TESTQ R9, R9 + JNE LBB6_52 + TESTB $2, R8 + JE LBB6_75 XORL R12, R12 - TESTQ R10, R10 - JE LBB6_40 - -LBB6_25: - CMPL R15, $2048 - JB LBB6_68 - MOVL R15, CX - ANDL $-2048, CX - CMPL CX, $55296 - JNE LBB6_54 - TESTQ R14, R14 - JLE LBB6_72 - CMPB 4(R9)(R12*1), $92 - JNE LBB6_73 - CMPL R15, $56319 - JA LBB6_70 - CMPQ R14, $7 - JL LBB6_70 - CMPB 5(R9)(R12*1), $92 - JNE LBB6_70 - CMPB 6(R9)(R12*1), $117 - JNE LBB6_70 - MOVL 7(R9)(R12*1), SI - MOVL SI, CX - NOTL CX - LEAL -808464432(SI), BX - ANDL $-2139062144, CX - TESTL BX, CX - JNE LBB6_99 - LEAL 421075225(SI), BX - ORL SI, BX - TESTL $-2139062144, BX - JNE LBB6_99 - MOVL SI, BX - ANDL $2139062143, BX - MOVL $-1061109568, 
-44(BP) - SUBL BX, -44(BP) - MOVQ SI, -80(BP) - LEAL 1179010630(BX), SI - MOVL SI, -60(BP) - ANDL CX, -44(BP) - MOVL -60(BP), SI - TESTL SI, -44(BP) - MOVQ -80(BP), SI - JNE LBB6_99 - MOVL $-522133280, -44(BP) - SUBL BX, -44(BP) - ADDL $960051513, BX - ANDL -44(BP), CX - TESTL BX, CX - JNE LBB6_99 - BSWAPL SI - MOVL SI, CX - SHRL $4, CX - NOTL CX - ANDL $16843009, CX - LEAL 0(CX)(CX*8), CX - ANDL $252645135, SI - ADDL CX, SI - MOVL SI, CX - SHRL $4, CX - ORL SI, CX - MOVL CX, SI - SHRL $8, SI - ANDL $65280, SI - MOVBLZX CX, BX - ORL SI, BX - ANDL $16515072, CX - CMPL CX, $14417920 - JE LBB6_77 - TESTB $2, -72(BP) - JE LBB6_114 - ADDQ $-7, R14 - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - ADDQ $3, AX - ADDQ $7, R12 - MOVL BX, R15 - CMPL BX, $127 - JA LBB6_25 - JMP LBB6_53 - -LBB6_40: - CMPL R15, $2048 - JB LBB6_68 - MOVL R15, CX - ANDL $-2048, CX - CMPL CX, $55296 - JNE LBB6_54 - CMPL R15, $56319 - JA LBB6_69 - CMPQ R14, $6 - JL LBB6_69 - CMPB 4(R9)(R12*1), $92 - JNE LBB6_69 - CMPB 5(R9)(R12*1), $117 + +LBB6_29: + CMPL R14, $2048 + JB LBB6_73 + MOVL R14, BX + ANDL $-2048, BX + CMPL BX, $55296 JNE LBB6_69 - MOVL 6(R9)(R12*1), SI - MOVL SI, CX - NOTL CX - LEAL -808464432(SI), BX - ANDL $-2139062144, CX - TESTL BX, CX - JNE LBB6_98 - LEAL 421075225(SI), BX - ORL SI, BX + CMPQ R10, $6 + JL LBB6_87 + CMPL R14, $56319 + JA LBB6_87 + CMPB 4(R11)(R12*1), $92 + JNE LBB6_87 + CMPB 5(R11)(R12*1), $117 + JNE LBB6_87 + MOVL 6(R11)(R12*1), R13 + MOVL R13, BX + NOTL BX + MOVQ R13, -56(BP) + ADDL $-808464432, R13 + ANDL $-2139062144, BX + MOVL BX, -60(BP) + TESTL R13, BX + JNE LBB6_135 + MOVQ -56(BP), R13 + LEAL 421075225(R13), BX + ORL R13, BX TESTL $-2139062144, BX - JNE LBB6_98 - MOVL SI, BX - ANDL $2139062143, BX - MOVL $-1061109568, -44(BP) - SUBL BX, -44(BP) - MOVQ SI, -80(BP) - LEAL 1179010630(BX), SI - MOVL SI, -60(BP) - ANDL CX, -44(BP) - MOVL -60(BP), SI - TESTL SI, -44(BP) - MOVQ -80(BP), SI - JNE LBB6_98 - MOVL $-522133280, -44(BP) - SUBL BX, -44(BP) - ADDL $960051513, BX - ANDL -44(BP), CX - TESTL BX, CX - JNE LBB6_98 - BSWAPL SI - MOVL SI, CX - SHRL $4, CX - NOTL CX - ANDL $16843009, CX - LEAL 0(CX)(CX*8), CX - ANDL $252645135, SI - ADDL CX, SI - MOVL SI, CX - SHRL $4, CX - ORL SI, CX - MOVL CX, SI - SHRL $8, SI - ANDL $65280, SI - MOVBLZX CX, BX - ORL SI, BX - ANDL $16515072, CX - CMPL CX, $14417920 - JE LBB6_76 - TESTB $2, -72(BP) - JE LBB6_113 - ADDQ $-6, R14 + JNE LBB6_135 + MOVQ -56(BP), R13 + ANDL $2139062143, R13 + MOVL $-1061109568, BX + SUBL R13, BX + MOVL BX, -64(BP) + LEAL 1179010630(R13), BX + MOVL BX, -44(BP) + MOVL -64(BP), BX + ANDL -60(BP), BX + TESTL BX, -44(BP) + JNE LBB6_135 + MOVL $-522133280, BX + SUBL R13, BX + MOVL BX, -44(BP) + ADDL $960051513, R13 + MOVL -60(BP), BX + ANDL -44(BP), BX + TESTL R13, BX + JNE LBB6_135 + MOVQ -56(BP), R13 + BSWAPL R13 + MOVL R13, BX + SHRL $4, BX + NOTL BX + ANDL $16843009, BX + LEAL 0(BX)(BX*8), BX + ANDL $252645135, R13 + ADDL BX, R13 + MOVL R13, BX + SHRL $4, BX + ORL R13, BX + MOVL BX, R13 + SHRL $8, BX + ANDL $65280, BX + MOVL BX, -56(BP) + MOVL R13, BX + MOVBLZX BX, R13 + ADDL -56(BP), R13 + ANDL $16515072, BX + CMPL BX, $14417920 + JE LBB6_90 MOVW $-16401, 0(AX) MOVB $-67, 2(AX) ADDQ $3, AX ADDQ $6, R12 - MOVL BX, R15 - CMPL BX, $128 - JAE LBB6_40 + ADDQ $-6, R10 + MOVL R13, R14 + CMPL R13, $127 + JA LBB6_29 + ADDQ R11, R12 + ADDQ $4, R12 + JMP LBB6_51 + +LBB6_42: + TESTL R15, R15 + JE LBB6_155 + CMPB -1(R11), $92 + JNE LBB6_146 + CMPB 0(R11), $92 + JNE LBB6_49 + CMPL R15, $1 + JLE LBB6_155 + MOVB 1(R11), BX + CMPB BX, $34 + JE 
LBB6_48 + CMPB BX, $92 + JNE LBB6_148 + +LBB6_48: + ADDQ $1, R11 + ADDQ $-1, R15 + +LBB6_49: + ADDQ $1, R11 + ADDQ $-1, R15 + JMP LBB6_17 + +LBB6_50: + MOVL R14, R13 + +LBB6_51: + MOVB R13, 0(AX) + ADDQ $1, AX + JMP LBB6_71 -LBB6_53: - LEAQ 4(R9)(R12*1), R9 - MOVL BX, R15 - JMP LBB6_67 +LBB6_52: + TESTB $2, R8 + JE LBB6_93 + XORL R12, R12 LBB6_54: - LEAQ 4(R9)(R12*1), R9 - MOVL R15, CX - SHRL $12, CX - ORB $-32, CX - MOVB CX, 0(AX) - MOVL R15, CX - SHRL $6, CX - ANDB $63, CX - ORB $-128, CX - MOVB CX, 1(AX) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 2(AX) - -LBB6_55: - ADDQ $3, AX - -LBB6_56: - MOVQ R14, R13 - -LBB6_57: - TESTQ R13, R13 - JNE LBB6_2 - JMP LBB6_96 - -LBB6_58: - TESTL R13, R13 - JE LBB6_94 - CMPB -1(R9), $92 - JNE LBB6_97 - CMPB 0(R9), $92 - JNE LBB6_65 - CMPL R13, $1 - JLE LBB6_94 - MOVB 1(R9), CX - CMPB CX, $34 - JE LBB6_64 - CMPB CX, $92 - JNE LBB6_109 - -LBB6_64: - INCQ R9 - DECQ R13 - -LBB6_65: - INCQ R9 - DECQ R13 - JMP LBB6_15 - -LBB6_66: - ADDQ $4, R9 - -LBB6_67: - MOVB R15, 0(AX) - INCQ AX - JMP LBB6_56 - -LBB6_68: - LEAQ 4(R9)(R12*1), R9 - MOVL R15, CX - SHRL $6, CX - ORB $-64, CX - MOVB CX, 0(AX) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 1(AX) - ADDQ $2, AX - JMP LBB6_56 + CMPL R14, $2048 + JB LBB6_73 + MOVL R14, BX + ANDL $-2048, BX + CMPL BX, $55296 + JNE LBB6_69 + TESTQ R10, R10 + JLE LBB6_152 + CMPB 4(R11)(R12*1), $92 + JNE LBB6_113 + CMPQ R10, $7 + JL LBB6_111 + CMPL R14, $56319 + JA LBB6_111 + CMPB 5(R11)(R12*1), $92 + JNE LBB6_111 + CMPB 6(R11)(R12*1), $117 + JNE LBB6_111 + MOVL 7(R11)(R12*1), R13 + MOVL R13, BX + NOTL BX + MOVQ R13, -56(BP) + ADDL $-808464432, R13 + ANDL $-2139062144, BX + MOVL BX, -60(BP) + TESTL R13, BX + JNE LBB6_149 + MOVQ -56(BP), R13 + LEAL 421075225(R13), BX + ORL R13, BX + TESTL $-2139062144, BX + JNE LBB6_149 + MOVQ -56(BP), R13 + ANDL $2139062143, R13 + MOVL $-1061109568, BX + SUBL R13, BX + MOVL BX, -64(BP) + LEAL 1179010630(R13), BX + MOVL BX, -44(BP) + MOVL -64(BP), BX + ANDL -60(BP), BX + TESTL BX, -44(BP) + JNE LBB6_149 + MOVL $-522133280, BX + SUBL R13, BX + MOVL BX, -44(BP) + ADDL $960051513, R13 + MOVL -60(BP), BX + ANDL -44(BP), BX + TESTL R13, BX + JNE LBB6_149 + MOVQ -56(BP), R13 + BSWAPL R13 + MOVL R13, BX + SHRL $4, BX + NOTL BX + ANDL $16843009, BX + LEAL 0(BX)(BX*8), BX + ANDL $252645135, R13 + ADDL BX, R13 + MOVQ R13, BX + SHRL $4, R13 + ORL BX, R13 + MOVL R13, BX + SHRL $8, BX + ANDL $65280, BX + MOVL BX, -56(BP) + MOVBLZX R13, BX + ADDL -56(BP), BX + ANDL $16515072, R13 + CMPL R13, $14417920 + JE LBB6_114 + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + ADDQ $3, AX + ADDQ $7, R12 + ADDQ $-7, R10 + MOVQ BX, R13 + MOVL R13, R14 + CMPL R13, $128 + JAE LBB6_54 + ADDQ R11, R12 + ADDQ $4, R12 + MOVQ BX, R13 + JMP LBB6_51 LBB6_69: - LEAQ 4(R9)(R12*1), R9 - JMP LBB6_71 + ADDQ R11, R12 + ADDQ $4, R12 LBB6_70: - LEAQ 5(R9)(R12*1), R9 - SUBQ R12, R13 - ADDQ $-5, R13 - MOVQ R13, R14 + MOVL R14, BX + SHRL $12, BX + ORB $-32, BX + MOVB BX, 0(AX) + MOVL R14, BX + SHRL $6, BX + ANDB $63, BX + ORB $-128, BX + MOVB BX, 1(AX) + ANDB $63, R14 + ORB $-128, R14 + MOVB R14, 2(AX) + ADDQ $3, AX LBB6_71: - TESTB $2, -72(BP) - JNE LBB6_75 - JMP LBB6_116 + MOVQ R10, R15 + MOVQ R12, R11 LBB6_72: - TESTB $2, -72(BP) - JNE LBB6_74 - JMP LBB6_94 + TESTQ R15, R15 + JNE LBB6_2 + JMP LBB6_154 LBB6_73: - TESTB $2, -72(BP) - JE LBB6_117 + ADDQ R11, R12 + ADDQ $4, R12 LBB6_74: - LEAQ 4(R9)(R12*1), R9 + MOVL R14, BX + SHRL $6, BX + ORB $-64, BX + MOVB BX, 0(AX) + ANDB $63, R14 + ORB $-128, R14 + MOVB R14, 1(AX) + ADDQ $2, AX + JMP LBB6_71 LBB6_75: + 
CMPL R14, $2048 + JB LBB6_74 + ANDL $16252928, R13 + CMPL R13, $14155776 + JNE LBB6_70 + CMPQ R15, $10 + JL LBB6_107 + CMPL R14, $56319 + JA LBB6_107 + CMPB 0(R12), $92 + JNE LBB6_107 + CMPB 5(R11), $117 + JNE LBB6_107 + MOVL 6(R11), BX + MOVL BX, R10 + NOTL R10 + MOVQ BX, -56(BP) + ADDL $-808464432, BX + ANDL $-2139062144, R10 + TESTL BX, R10 + JNE LBB6_136 + MOVQ -56(BP), R13 + LEAL 421075225(R13), BX + ORL R13, BX + TESTL $-2139062144, BX + JNE LBB6_136 + MOVQ -56(BP), R13 + ANDL $2139062143, R13 + MOVL $-1061109568, BX + SUBL R13, BX + MOVL BX, -44(BP) + LEAL 1179010630(R13), BX + MOVL BX, -60(BP) + MOVL -44(BP), BX + ANDL R10, BX + TESTL BX, -60(BP) + JNE LBB6_136 + MOVL $-522133280, BX + SUBL R13, BX + ADDL $960051513, R13 + ANDL BX, R10 + TESTL R13, R10 + JNE LBB6_136 + MOVQ -56(BP), R12 + BSWAPL R12 + MOVL R12, BX + SHRL $4, BX + NOTL BX + ANDL $16843009, BX + LEAL 0(BX)(BX*8), BX + ANDL $252645135, R12 + ADDL BX, R12 + MOVL R12, R10 + SHRL $4, R10 + ORL R12, R10 + ADDQ $10, R11 + MOVL R10, BX + ANDL $16515072, BX + CMPL BX, $14417920 + JNE LBB6_117 + MOVL R10, R12 + SHRL $8, R12 + ANDL $65280, R12 + MOVBLZX R10, BX + ORL R12, BX + JMP LBB6_91 + +LBB6_87: + ADDQ R12, R11 + ADDQ $4, R11 + +LBB6_88: + TESTB $2, R8 + JE LBB6_150 + +LBB6_89: MOVW $-16401, 0(AX) MOVB $-67, 2(AX) - JMP LBB6_55 - -LBB6_76: - LEAQ 10(R9)(R12*1), R9 - SUBQ R12, R13 - ADDQ $-10, R13 - JMP LBB6_78 - -LBB6_77: - LEAQ 11(R9)(R12*1), R9 - SUBQ R12, R13 - ADDQ $-11, R13 - -LBB6_78: - SHLL $10, R15 - LEAL -56613888(R15)(BX*1), CX - CMPL CX, $1114112 - JB LBB6_81 - TESTB $2, -72(BP) - JE LBB6_111 - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - ADDQ $3, AX - JMP LBB6_57 + ADDQ $3, AX + MOVQ R10, R15 + JMP LBB6_72 -LBB6_81: - MOVL CX, SI - SHRL $18, SI - ORB $-16, SI - MOVB SI, 0(AX) - MOVL CX, SI - SHRL $12, SI - ANDB $63, SI - ORB $-128, SI - MOVB SI, 1(AX) - MOVL CX, SI - SHRL $6, SI - ANDB $63, SI - ORB $-128, SI - MOVB SI, 2(AX) - ANDB $63, CX - ORB $-128, CX - MOVB CX, 3(AX) +LBB6_90: + MOVQ R13, BX + ADDQ R12, R11 + ADDQ $10, R11 + SUBQ R12, R15 + +LBB6_91: + ADDQ $-10, R15 + +LBB6_92: + SHLL $10, R14 + MOVL BX, R10 + ADDL R14, R10 + ADDL BX, R14 + ADDL $-56613888, R14 + MOVL R14, BX + SHRL $18, BX + ORB $-16, BX + MOVB BX, 0(AX) + MOVL R14, BX + SHRL $12, BX + ANDB $63, BX + ORB $-128, BX + MOVB BX, 1(AX) + SHRL $6, R14 + ANDB $63, R14 + ORB $-128, R14 + MOVB R14, 2(AX) + ANDB $63, R10 + ORB $-128, R10 + MOVB R10, 3(AX) ADDQ $4, AX - JMP LBB6_57 + JMP LBB6_72 -LBB6_82: - XORL R13, R13 +LBB6_93: + CMPL R14, $2048 + JB LBB6_74 + ANDL $16252928, R13 + CMPL R13, $14155776 + JNE LBB6_70 + CMPQ R15, $5 + JL LBB6_152 + CMPB 0(R12), $92 + JNE LBB6_116 + LEAQ 5(R11), R12 + CMPQ R15, $11 + JL LBB6_112 + CMPL R14, $56319 + JA LBB6_112 + CMPB 0(R12), $92 + JNE LBB6_112 + CMPB 6(R11), $117 + JNE LBB6_112 + MOVL 7(R11), BX + MOVL BX, R10 + NOTL R10 + MOVQ BX, -56(BP) + ADDL $-808464432, BX + ANDL $-2139062144, R10 + TESTL BX, R10 + JNE LBB6_136 + MOVQ -56(BP), R13 + LEAL 421075225(R13), BX + ORL R13, BX + TESTL $-2139062144, BX + JNE LBB6_136 + MOVQ -56(BP), R13 + ANDL $2139062143, R13 + MOVL $-1061109568, BX + SUBL R13, BX + MOVL BX, -44(BP) + LEAL 1179010630(R13), BX + MOVL BX, -60(BP) + MOVL -44(BP), BX + ANDL R10, BX + TESTL BX, -60(BP) + JNE LBB6_136 + MOVL $-522133280, BX + SUBL R13, BX + ADDL $960051513, R13 + ANDL BX, R10 + TESTL R13, R10 + JNE LBB6_136 + MOVQ -56(BP), R12 + BSWAPL R12 + MOVL R12, BX + SHRL $4, BX + NOTL BX + ANDL $16843009, BX + LEAL 0(BX)(BX*8), BX + ANDL $252645135, R12 + ADDL BX, R12 + MOVL R12, R10 
+ SHRL $4, R10 + ORL R12, R10 + ADDQ $11, R11 + MOVL R10, BX + ANDL $16515072, BX + CMPL BX, $14417920 + JNE LBB6_117 + MOVL R10, R12 + SHRL $8, R12 + ANDL $65280, R12 + MOVBLZX R10, BX + ORL R12, BX + JMP LBB6_115 + +LBB6_107: + MOVQ R12, R11 + JMP LBB6_88 + +LBB6_111: + ADDQ R11, R12 + ADDQ $5, R12 + +LBB6_112: + ADDQ $-1, R10 + MOVQ R12, R11 + JMP LBB6_88 + +LBB6_113: + ADDQ R12, R11 + ADDQ $4, R11 + TESTB $2, R8 + JNE LBB6_89 + JMP LBB6_117 + +LBB6_114: + ADDQ R12, R11 + ADDQ $11, R11 + SUBQ R12, R15 + +LBB6_115: + ADDQ $-11, R15 + JMP LBB6_92 + +LBB6_116: + MOVQ R12, R11 + TESTB $2, R8 + JNE LBB6_89 + +LBB6_117: + SUBQ DI, R11 + JMP LBB6_151 + +LBB6_118: + XORL R15, R15 MOVQ DX, AX -LBB6_83: - ADDQ R13, AX +LBB6_119: + ADDQ R15, AX SUBQ DX, AX -LBB6_84: - ADDQ $40, SP +LBB6_120: + ADDQ $24, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -2322,197 +2574,193 @@ LBB6_84: BYTE $0x5d // popq %rbp RET -LBB6_85: - MOVQ R9, DX - SUBQ DI, DX - MOVQ -56(BP), DI - MOVQ DX, 0(DI) - MOVB 0(R9), CX - LEAL -48(CX), SI - MOVQ $-2, AX - CMPB SI, $10 - JB LBB6_87 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 - -LBB6_87: - LEAQ 1(DX), CX - MOVQ CX, 0(DI) - MOVB 1(R9), CX - LEAL -48(CX), SI - CMPB SI, $10 - JB LBB6_89 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 - -LBB6_89: - LEAQ 2(DX), CX - MOVQ CX, 0(DI) - MOVB 2(R9), CX - LEAL -48(CX), SI - CMPB SI, $10 - JB LBB6_91 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 +LBB6_121: + MOVQ R11, DX + SUBQ DI, DX + MOVQ DX, 0(CX) + MOVB 0(R11), SI + LEAL -48(SI), AX + CMPB AX, $10 + JB LBB6_124 + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, $37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 + +LBB6_124: + LEAQ 1(DX), AX + MOVQ AX, 0(CX) + MOVB 1(R11), SI + LEAL -48(SI), AX + CMPB AX, $9 + JBE LBB6_127 + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, $37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 + +LBB6_127: + LEAQ 2(DX), AX + MOVQ AX, 0(CX) + MOVB 2(R11), SI + LEAL -48(SI), AX + CMPB AX, $10 + JB LBB6_130 + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, $37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 -LBB6_91: - LEAQ 3(DX), CX - MOVQ CX, 0(DI) - MOVB 3(R9), CX - LEAL -48(CX), SI - CMPB SI, $10 - JB LBB6_93 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 +LBB6_130: + LEAQ 3(DX), AX + MOVQ AX, 0(CX) + MOVB 3(R11), SI + LEAL -48(SI), AX + CMPB AX, $10 + JB LBB6_133 + +LBB6_131: + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, $37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 -LBB6_93: +LBB6_133: ADDQ $4, DX - MOVQ DX, 0(DI) - JMP LBB6_84 - -LBB6_94: - MOVQ -56(BP), AX - MOVQ R11, 0(AX) - MOVQ $-1, AX - JMP LBB6_84 + MOVQ DX, 0(CX) + MOVQ $-2, AX + JMP LBB6_120 -LBB6_95: +LBB6_134: NOTQ DI - ADDQ DI, R9 - MOVQ -56(BP), AX - MOVQ R9, 0(AX) + ADDQ DI, R11 + MOVQ R11, 0(CX) MOVQ $-3, AX - JMP LBB6_84 + JMP LBB6_120 -LBB6_96: - XORL R13, R13 - JMP LBB6_83 +LBB6_135: + ADDQ R11, R12 + ADDQ $4, R12 -LBB6_97: - NOTQ DI - ADDQ DI, R9 - JMP LBB6_110 +LBB6_136: + MOVQ R12, DX + SUBQ DI, DX + ADDQ $2, DX + MOVQ DX, 0(CX) + MOVB 2(R12), SI + LEAL -48(SI), AX + CMPB AX, $10 + JB LBB6_139 + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, $37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 + +LBB6_139: + LEAQ 1(DX), AX + MOVQ AX, 0(CX) + MOVB 3(R12), SI + LEAL -48(SI), AX + CMPB AX, $9 + JBE LBB6_142 + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, 
$37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 + +LBB6_142: + LEAQ 2(DX), AX + MOVQ AX, 0(CX) + MOVB 4(R12), SI + LEAL -48(SI), AX + CMPB AX, $10 + JB LBB6_145 + MOVQ $-2, AX + ADDB $-65, SI + CMPB SI, $37 + JA LBB6_120 + MOVBLZX SI, SI + MOVQ $270582939711, DI + BTQ SI, DI + JAE LBB6_120 -LBB6_98: - LEAQ 4(R9)(R12*1), SI - JMP LBB6_100 +LBB6_145: + LEAQ 3(DX), AX + MOVQ AX, 0(CX) + MOVB 5(R12), SI + LEAL -48(SI), AX + CMPB AX, $10 + JAE LBB6_131 + JMP LBB6_133 -LBB6_99: - LEAQ 5(R9)(R12*1), SI +LBB6_146: + NOTQ DI + ADDQ DI, R11 -LBB6_100: - MOVQ SI, DX - SUBQ DI, DX - ADDQ $2, DX - MOVQ -56(BP), AX - MOVQ DX, 0(AX) - MOVB 2(SI), CX - LEAL -48(CX), DI +LBB6_147: + MOVQ R11, 0(CX) MOVQ $-2, AX - CMPB DI, $10 - JB LBB6_102 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 - -LBB6_102: - LEAQ 1(DX), CX - MOVQ -56(BP), DI - MOVQ CX, 0(DI) - MOVB 3(SI), CX - LEAL -48(CX), DI - CMPB DI, $10 - JB LBB6_104 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 - -LBB6_104: - LEAQ 2(DX), CX - MOVQ -56(BP), DI - MOVQ CX, 0(DI) - MOVB 4(SI), CX - LEAL -48(CX), DI - CMPB DI, $10 - JB LBB6_106 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 - -LBB6_106: - LEAQ 3(DX), CX - MOVQ -56(BP), DI - MOVQ CX, 0(DI) - MOVB 5(SI), CX - LEAL -48(CX), SI - CMPB SI, $10 - JB LBB6_108 - ANDB $-33, CX - ADDB $-65, CX - CMPB CX, $5 - JA LBB6_84 - -LBB6_108: - ADDQ $4, DX - MOVQ -56(BP), CX - MOVQ DX, 0(CX) - JMP LBB6_84 + JMP LBB6_120 -LBB6_109: - SUBQ DI, R9 - INCQ R9 +LBB6_148: + SUBQ DI, R11 + ADDQ $1, R11 + JMP LBB6_147 -LBB6_110: - MOVQ -56(BP), AX - MOVQ R9, 0(AX) - MOVQ $-2, AX - JMP LBB6_84 +LBB6_149: + ADDQ R11, R12 + ADDQ $5, R12 + JMP LBB6_136 -LBB6_111: - SUBQ DI, R9 - ADDQ $-4, R9 +LBB6_150: + ADDQ DI, R9 + SUBQ R9, R11 -LBB6_112: - MOVQ -56(BP), AX - MOVQ R9, 0(AX) +LBB6_151: + ADDQ $-4, R11 + MOVQ R11, 0(CX) MOVQ $-4, AX - JMP LBB6_84 - -LBB6_113: - LEAQ 10(R9)(R12*1), AX - JMP LBB6_115 - -LBB6_114: - LEAQ 11(R9)(R12*1), AX + JMP LBB6_120 -LBB6_115: - SUBQ DI, AX - ADDQ $-4, AX - MOVQ -56(BP), CX - MOVQ AX, 0(CX) - MOVQ $-4, AX - JMP LBB6_84 +LBB6_152: + TESTB $2, R8 + JE LBB6_155 + MOVW $-16401, 0(AX) + MOVB $-67, 2(AX) + ADDQ $3, AX -LBB6_116: - LEAQ 4(R10)(DI*1), AX - SUBQ AX, R9 - JMP LBB6_112 +LBB6_154: + XORL R15, R15 + JMP LBB6_119 -LBB6_117: - ADDQ R12, R9 - SUBQ DI, R9 - JMP LBB6_112 +LBB6_155: + MOVQ SI, 0(CX) + MOVQ $-1, AX + JMP LBB6_120 LCPI7_0: QUAD $0x2626262626262626; QUAD $0x2626262626262626 // .space 16, '&&&&&&&&&&&&&&&&' @@ -2521,10 +2769,10 @@ LCPI7_1: QUAD $0xe2e2e2e2e2e2e2e2; QUAD $0xe2e2e2e2e2e2e2e2 // .space 16, '\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2' LCPI7_2: - QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' + QUAD $0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' LCPI7_3: - QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' + QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' _html_escape: BYTE $0x55 // pushq %rbp @@ -2534,359 +2782,372 @@ _html_escape: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $24, SP - MOVQ CX, -64(BP) - MOVQ DX, R10 - MOVQ DX, -56(BP) - MOVQ DI, -48(BP) + SUBQ $16, SP + MOVQ CX, -56(BP) + MOVQ DX, R14 + MOVQ DX, -48(BP) MOVQ DI, AX TESTQ SI, SI - JLE LBB7_59 - MOVQ -64(BP), AX + JLE LBB7_70 + MOVQ 
-56(BP), AX MOVQ 0(AX), R9 - QUAD $0xffffff85056ffac5 // vmovdqu $-123(%rip), %xmm0 /* LCPI7_0(%rip) */ - QUAD $0xffffff8d0d6ffac5 // vmovdqu $-115(%rip), %xmm1 /* LCPI7_1(%rip) */ - QUAD $0xffffff95156ffac5 // vmovdqu $-107(%rip), %xmm2 /* LCPI7_2(%rip) */ - QUAD $0xffffff9d1d6ffac5 // vmovdqu $-99(%rip), %xmm3 /* LCPI7_3(%rip) */ - MOVQ $5764607797912141824, R14 - LONG $0x8d1d8d4c; WORD $0x00ad; BYTE $0x00 // leaq $44429(%rip), %r11 /* __HtmlQuoteTab(%rip) */ - MOVQ -48(BP), R15 - MOVQ -56(BP), R10 + QUAD $0xffffff89056ffac5 // vmovdqu $-119(%rip), %xmm0 /* LCPI7_0(%rip) */ + QUAD $0xffffff910d6ffac5 // vmovdqu $-111(%rip), %xmm1 /* LCPI7_1(%rip) */ + QUAD $0xffffff99156ffac5 // vmovdqu $-103(%rip), %xmm2 /* LCPI7_2(%rip) */ + QUAD $0xffffffa11d6ffac5 // vmovdqu $-95(%rip), %xmm3 /* LCPI7_3(%rip) */ + LONG $0xd91d8d4c; WORD $0x00ad; BYTE $0x00 // leaq $44505(%rip), %r11 /* __HtmlQuoteTab(%rip) */ + MOVQ DI, R12 + MOVQ -48(BP), R14 LBB7_2: TESTQ R9, R9 - JLE LBB7_61 - CMPQ SI, $15 - SETGT BX - MOVQ R9, R12 - MOVQ R10, R8 - MOVQ SI, AX - MOVQ R15, R13 - CMPQ R9, $16 - JL LBB7_9 + JLE LBB7_3 CMPQ SI, $16 - JL LBB7_9 - MOVQ R15, R13 - MOVQ SI, AX - MOVQ R10, R8 + SETGE AX + MOVQ R9, R13 + MOVQ R14, R8 + MOVQ SI, BX + MOVQ R12, R15 + JL LBB7_12 + CMPQ R9, $16 + JL LBB7_12 + XORL R8, R8 + MOVQ SI, DX MOVQ R9, CX -LBB7_6: - LONG $0x6f7ac1c4; WORD $0x0065 // vmovdqu (%r13), %xmm4 +LBB7_7: + LONG $0x6f7a81c4; WORD $0x0424 // vmovdqu (%r12,%r8), %xmm4 LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 - LONG $0xf2ebd9c5 // vpor %xmm2, %xmm4, %xmm6 + LONG $0xf2dbd9c5 // vpand %xmm2, %xmm4, %xmm6 LONG $0xf374c9c5 // vpcmpeqb %xmm3, %xmm6, %xmm6 LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0x7f7ac1c4; BYTE $0x20 // vmovdqu %xmm4, (%r8) - LONG $0xd5d7f9c5 // vpmovmskb %xmm5, %edx - TESTW DX, DX - JNE LBB7_21 - ADDQ $16, R13 + LONG $0x7f7a81c4; WORD $0x0624 // vmovdqu %xmm4, (%r14,%r8) + LONG $0xc5d7f9c5 // vpmovmskb %xmm5, %eax + TESTL AX, AX + JNE LBB7_8 + LEAQ -16(DX), BX + LEAQ -16(CX), R13 ADDQ $16, R8 - LEAQ -16(CX), R12 - CMPQ AX, $31 - SETGT BX - CMPQ AX, $32 - LEAQ -16(AX), AX - JL LBB7_9 + CMPQ DX, $32 + SETGE AX + JL LBB7_11 + MOVQ BX, DX CMPQ CX, $31 - MOVQ R12, CX - JG LBB7_6 + MOVQ R13, CX + JG LBB7_7 -LBB7_9: - TESTB BX, BX +LBB7_11: + LEAQ 0(R12)(R8*1), R15 + ADDQ R14, R8 + +LBB7_12: + TESTB AX, AX JE LBB7_13 - MOVQ R14, DX - LONG $0x6f7ac1c4; WORD $0x0065 // vmovdqu (%r13), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 - LONG $0xf2ebd9c5 // vpor %xmm2, %xmm4, %xmm6 - LONG $0xf374c9c5 // vpcmpeqb %xmm3, %xmm6, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xc5d7f9c5 // vpmovmskb %xmm5, %eax + LONG $0x6f7ac1c4; BYTE $0x27 // vmovdqu (%r15), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xedebc9c5 // vpor %xmm5, %xmm6, %xmm5 + LONG $0xf2dbd9c5 // vpand %xmm2, %xmm4, %xmm6 + LONG $0xf374c9c5 // vpcmpeqb %xmm3, %xmm6, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xc5d7f9c5 // vpmovmskb %xmm5, %eax ORL $65536, AX - BSFL AX, R14 - LONG $0x7ef9e1c4; BYTE $0xe0 // vmovq %xmm4, %rax - CMPQ R12, R14 - JGE LBB7_22 - CMPQ R12, $8 - JB LBB7_25 + BSFL AX, R10 + LONG $0x7ef9e1c4; BYTE $0xe0 // vmovq %xmm4, %rax + CMPQ R13, R10 + JGE LBB7_24 + CMPQ R13, $8 + JB LBB7_35 MOVQ AX, 0(R8) - LEAQ 8(R13), 
R14 + LEAQ 8(R15), R10 ADDQ $8, R8 - LEAQ -8(R12), AX - CMPQ AX, $4 - JAE LBB7_26 - JMP LBB7_27 + LEAQ -8(R13), BX + CMPQ BX, $4 + JAE LBB7_38 + JMP LBB7_39 LBB7_13: - TESTQ R12, R12 - JLE LBB7_20 - TESTQ AX, AX - JLE LBB7_20 + TESTQ BX, BX + JLE LBB7_21 + TESTQ R13, R13 + JLE LBB7_21 + XORL DX, DX + XORL AX, AX -LBB7_15: - MOVBLZX 0(R13), CX - CMPQ CX, $62 +LBB7_16: + MOVBLZX 0(R15)(DX*1), R11 + CMPQ R11, $62 JA LBB7_17 - BTQ CX, R14 - JB LBB7_24 + MOVQ $5764607797912141824, CX + BTQ R11, CX + JB LBB7_45 LBB7_17: - CMPB CX, $-30 - JE LBB7_24 - INCQ R13 - MOVB CX, 0(R8) - CMPQ AX, $2 - LEAQ -1(AX), AX + CMPB R11, $-30 + JE LBB7_45 + LEAQ 0(BX)(AX*1), R10 + MOVB R11, 0(R8)(DX*1) + LEAQ -1(AX), CX + CMPQ R10, $2 JL LBB7_20 - INCQ R8 - CMPQ R12, $1 - LEAQ -1(R12), R12 - JG LBB7_15 + ADDQ R13, AX + ADDQ $1, DX + CMPQ AX, $1 + MOVQ CX, AX + JG LBB7_16 LBB7_20: - SUBQ R15, R13 - NEGQ AX - SBBQ R12, R12 - XORQ R13, R12 - TESTQ R12, R12 - JNS LBB7_37 - JMP LBB7_57 + SUBQ CX, R15 + ADDQ CX, BX + LONG $0x831d8d4c; WORD $0x00ac; BYTE $0x00 // leaq $44163(%rip), %r11 /* __HtmlQuoteTab(%rip) */ LBB7_21: - MOVWLZX DX, AX - SUBQ R15, R13 - BSFL AX, R12 - ADDQ R13, R12 - TESTQ R12, R12 - JNS LBB7_37 - JMP LBB7_57 + TESTQ BX, BX + JE LBB7_22 + NOTQ R15 + ADDQ R12, R15 + TESTQ R15, R15 + JNS LBB7_49 + JMP LBB7_48 -LBB7_22: - CMPL R14, $8 - JB LBB7_31 +LBB7_8: + BSFW AX, AX + MOVWLZX AX, R15 + ADDQ R8, R15 + TESTQ R15, R15 + JNS LBB7_49 + JMP LBB7_48 + +LBB7_24: + CMPL R10, $8 + JB LBB7_25 MOVQ AX, 0(R8) - LEAQ 8(R13), R12 + LEAQ 8(R15), R13 ADDQ $8, R8 - LEAQ -8(R14), AX - CMPQ AX, $4 - JAE LBB7_32 - JMP LBB7_33 + LEAQ -8(R10), BX + CMPQ BX, $4 + JAE LBB7_28 + JMP LBB7_29 -LBB7_24: - SUBQ R15, R13 - MOVQ R13, R12 - TESTQ R12, R12 - JNS LBB7_37 - JMP LBB7_57 +LBB7_45: + SUBQ R12, R15 + SUBQ AX, R15 + LONG $0x201d8d4c; WORD $0x00ac; BYTE $0x00 // leaq $44064(%rip), %r11 /* __HtmlQuoteTab(%rip) */ + TESTQ R15, R15 + JNS LBB7_49 + JMP LBB7_48 -LBB7_25: - MOVQ R13, R14 - MOVQ R12, AX - CMPQ AX, $4 - JB LBB7_27 +LBB7_35: + MOVQ R15, R10 + MOVQ R13, BX + CMPQ BX, $4 + JB LBB7_39 -LBB7_26: - MOVL 0(R14), CX - MOVL CX, 0(R8) - ADDQ $4, R14 +LBB7_38: + MOVL 0(R10), AX + MOVL AX, 0(R8) + ADDQ $4, R10 ADDQ $4, R8 - ADDQ $-4, AX - -LBB7_27: - CMPQ AX, $2 - JB LBB7_28 - MOVWLZX 0(R14), CX - MOVW CX, 0(R8) - ADDQ $2, R14 - ADDQ $2, R8 - ADDQ $-2, AX - TESTQ AX, AX - JNE LBB7_29 - JMP LBB7_30 + ADDQ $-4, BX -LBB7_28: - TESTQ AX, AX - JE LBB7_30 +LBB7_39: + CMPQ BX, $2 + JAE LBB7_40 + TESTQ BX, BX + JE LBB7_43 -LBB7_29: - MOVB 0(R14), AX +LBB7_42: + MOVB 0(R10), AX MOVB AX, 0(R8) -LBB7_30: - SUBQ R15, R12 - ADDQ R13, R12 - NOTQ R12 - MOVQ DX, R14 - TESTQ R12, R12 - JNS LBB7_37 - JMP LBB7_57 +LBB7_43: + ADDQ R15, R13 + NOTQ R13 + ADDQ R12, R13 + MOVQ R13, R15 + TESTQ R15, R15 + JNS LBB7_49 + JMP LBB7_48 -LBB7_31: - MOVQ R13, R12 - MOVQ R14, AX - CMPQ AX, $4 - JB LBB7_33 +LBB7_25: + MOVQ R15, R13 + MOVQ R10, BX + CMPQ BX, $4 + JB LBB7_29 -LBB7_32: - MOVL 0(R12), CX - MOVL CX, 0(R8) - ADDQ $4, R12 +LBB7_28: + MOVL 0(R13), AX + MOVL AX, 0(R8) + ADDQ $4, R13 ADDQ $4, R8 - ADDQ $-4, AX + ADDQ $-4, BX + +LBB7_29: + CMPQ BX, $2 + JAE LBB7_30 + TESTQ BX, BX + JE LBB7_33 + +LBB7_32: + MOVB 0(R13), AX + MOVB AX, 0(R8) LBB7_33: - CMPQ AX, $2 - JB LBB7_34 - MOVWLZX 0(R12), CX - MOVW CX, 0(R8) - ADDQ $2, R12 - ADDQ $2, R8 - ADDQ $-2, AX - TESTQ AX, AX - JNE LBB7_35 - JMP LBB7_36 + SUBQ R12, R15 + ADDQ R10, R15 + TESTQ R15, R15 + JNS LBB7_49 + JMP LBB7_48 -LBB7_34: - TESTQ AX, AX - JE LBB7_36 +LBB7_40: + MOVWLZX 0(R10), AX + MOVW AX, 
0(R8) + ADDQ $2, R10 + ADDQ $2, R8 + ADDQ $-2, BX + TESTQ BX, BX + JNE LBB7_42 + JMP LBB7_43 -LBB7_35: - MOVB 0(R12), AX - MOVB AX, 0(R8) +LBB7_30: + MOVWLZX 0(R13), AX + MOVW AX, 0(R8) + ADDQ $2, R13 + ADDQ $2, R8 + ADDQ $-2, BX + TESTQ BX, BX + JNE LBB7_32 + JMP LBB7_33 -LBB7_36: - SUBQ R15, R13 - ADDQ R14, R13 - MOVQ R13, R12 - MOVQ DX, R14 - TESTQ R12, R12 - JS LBB7_57 - -LBB7_37: - ADDQ R12, R15 - ADDQ R12, R10 - SUBQ R12, SI - JLE LBB7_58 - SUBQ R12, R9 - MOVB 0(R15), CX +LBB7_22: + SUBQ R12, R15 + TESTQ R15, R15 + JS LBB7_48 + +LBB7_49: + ADDQ R15, R12 + ADDQ R15, R14 + SUBQ R15, SI + JLE LBB7_50 + SUBQ R15, R9 + MOVB 0(R12), CX CMPB CX, $-30 - JE LBB7_51 - MOVQ R15, AX + JE LBB7_53 + MOVQ R12, AX -LBB7_40: - MOVBLZX CX, DX - SHLQ $4, DX - MOVQ 0(DX)(R11*1), DI - MOVLQSX DI, BX - SUBQ BX, R9 - JL LBB7_60 - SHLQ $32, DI - LEAQ 8(DX)(R11*1), R15 - MOVQ $12884901889, CX - CMPQ DI, CX - JL LBB7_43 - MOVL 0(R15), CX - MOVL CX, 0(R10) - LEAQ 12(DX)(R11*1), R15 - LEAQ 4(R10), R8 - LEAQ -4(BX), DI - CMPQ DI, $2 - JGE LBB7_44 - JMP LBB7_45 +LBB7_57: + MOVBLZX CX, CX + SHLQ $4, CX + MOVQ 0(CX)(R11*1), DX + MOVLQSX DX, R15 + SUBQ R15, R9 + JL LBB7_58 + SHLQ $32, DX + LEAQ 0(CX)(R11*1), R10 + ADDQ $8, R10 + MOVQ $12884901889, BX + CMPQ DX, BX + JL LBB7_62 + MOVL 0(R10), DX + MOVL DX, 0(R14) + LEAQ 0(CX)(R11*1), R10 + ADDQ $12, R10 + LEAQ 4(R14), R8 + LEAQ -4(R15), CX + CMPQ CX, $2 + JGE LBB7_65 + JMP LBB7_66 -LBB7_43: - MOVQ R10, R8 - MOVQ BX, DI - CMPQ DI, $2 - JL LBB7_45 +LBB7_62: + MOVQ R14, R8 + MOVQ R15, CX + CMPQ CX, $2 + JL LBB7_66 -LBB7_44: - MOVWLZX 0(R15), DX +LBB7_65: + MOVWLZX 0(R10), DX MOVW DX, 0(R8) - ADDQ $2, R15 + ADDQ $2, R10 ADDQ $2, R8 - ADDQ $-2, DI + ADDQ $-2, CX -LBB7_45: - TESTQ DI, DI - JLE LBB7_47 - MOVB 0(R15), CX +LBB7_66: + TESTQ CX, CX + JLE LBB7_68 + MOVB 0(R10), CX MOVB CX, 0(R8) -LBB7_47: - ADDQ BX, R10 +LBB7_68: + ADDQ R15, R14 -LBB7_48: - INCQ AX - MOVQ AX, R15 +LBB7_69: + ADDQ $1, AX + LEAQ -1(SI), CX + MOVQ AX, R12 CMPQ SI, $1 - LEAQ -1(SI), SI + MOVQ CX, SI JG LBB7_2 - JMP LBB7_59 + JMP LBB7_70 -LBB7_51: +LBB7_53: CMPQ SI, $3 - JL LBB7_55 - CMPB 1(R15), $-128 - JNE LBB7_55 - MOVB 2(R15), CX - MOVL CX, AX - ANDB $-2, AX - CMPB AX, $-88 - JNE LBB7_55 - LEAQ 2(R15), AX - ADDQ $-2, SI - JMP LBB7_40 - -LBB7_55: - TESTQ R9, R9 - JLE LBB7_61 - MOVB $-30, 0(R10) - INCQ R10 - DECQ R9 - MOVQ R15, AX - JMP LBB7_48 - -LBB7_57: - SUBQ -56(BP), R10 - NOTQ R12 - ADDQ R12, R10 - MOVQ -64(BP), AX - MOVQ R10, 0(AX) - SUBQ -48(BP), R15 - ADDQ R12, R15 - NOTQ R15 - JMP LBB7_62 - -LBB7_58: - MOVQ R15, AX + JL LBB7_59 + CMPB 1(R12), $-128 + JNE LBB7_59 + MOVB 2(R12), CX + MOVL CX, AX + ANDB $-2, AX + CMPB AX, $-88 + JNE LBB7_59 + LEAQ 2(R12), AX + ADDQ $-2, SI + JMP LBB7_57 LBB7_59: - SUBQ -56(BP), R10 - MOVQ -64(BP), CX - MOVQ R10, 0(CX) - SUBQ -48(BP), AX - JMP LBB7_63 + TESTQ R9, R9 + JLE LBB7_3 + MOVB $-30, 0(R14) + ADDQ $1, R14 + ADDQ $-1, R9 + MOVQ R12, AX + JMP LBB7_69 -LBB7_60: - SUBQ -56(BP), R10 - MOVQ -64(BP), AX - MOVQ R10, 0(AX) +LBB7_48: + MOVQ -48(BP), CX + ADDQ R15, CX + NOTQ CX + ADDQ R14, CX + MOVQ -56(BP), AX + MOVQ CX, 0(AX) + SUBQ R12, DI + ADDQ R15, DI + MOVQ DI, AX + JMP LBB7_71 + +LBB7_50: + MOVQ R12, AX -LBB7_61: - NOTQ R15 - ADDQ -48(BP), R15 +LBB7_70: + SUBQ -48(BP), R14 + MOVQ -56(BP), CX + MOVQ R14, 0(CX) + SUBQ DI, AX + JMP LBB7_71 -LBB7_62: - MOVQ R15, AX +LBB7_58: + SUBQ -48(BP), R14 + MOVQ -56(BP), AX + MOVQ R14, 0(AX) -LBB7_63: - ADDQ $24, SP +LBB7_3: + NOTQ R12 + ADDQ DI, R12 + MOVQ R12, AX + +LBB7_71: + ADDQ $16, SP BYTE $0x5b // popq %rbx 
WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -2920,73 +3181,74 @@ LBB8_5: SHLQ CX, DI MOVL AX, CX SHLQ $4, CX - LONG $0xbf3d8d4c; WORD $0x0031; BYTE $0x00 // leaq $12735(%rip), %r15 /* _POW10_M128_TAB(%rip) */ + LONG $0xd73d8d4c; WORD $0x0030; BYTE $0x00 // leaq $12503(%rip), %r15 /* _POW10_M128_TAB(%rip) */ MOVQ DI, AX MULQ 8(CX)(R15*1) MOVQ AX, R11 MOVQ DX, R14 ANDL $511, DX + CMPQ DX, $511 + JNE LBB8_11 MOVQ DI, BX NOTQ BX - CMPQ AX, BX + CMPQ R11, BX JBE LBB8_11 - CMPL DX, $511 - JNE LBB8_11 MOVQ DI, AX MULQ 0(CX)(R15*1) ADDQ DX, R11 ADCQ $0, R14 MOVL R14, DX ANDL $511, DX - CMPQ AX, BX - JBE LBB8_11 + CMPQ DX, $511 + JNE LBB8_11 CMPQ R11, $-1 JNE LBB8_11 - CMPL DX, $511 - JE LBB8_1 + CMPQ AX, BX + JA LBB8_1 LBB8_11: - MOVQ R14, DI - SHRQ $63, DI - LEAL 9(DI), CX - SHRQ CX, R14 - ORQ R11, DX - JNE LBB8_14 - MOVL R14, AX - ANDL $3, AX - CMPL AX, $1 - JE LBB8_1 - -LBB8_14: - LONG $0x526ac669; WORD $0x0003 // imull $217706, %esi, %eax - SARL $16, AX - ADDL $1087, AX - WORD $0x9848 // cltq - SUBQ R10, AX - XORQ $1, DI - SUBQ DI, AX - MOVL R14, DX - ANDL $1, DX - ADDQ R14, DX - MOVQ $126100789566373888, CX - ANDQ DX, CX - CMPQ CX, $1 - SBBQ $-1, AX - LEAQ -1(AX), SI - CMPQ SI, $2045 - JBE LBB8_16 + MOVQ R14, AX + SHRQ $63, AX + LEAL 9(AX), CX + SHRQ CX, R14 + TESTQ R11, R11 + JNE LBB8_15 + TESTQ DX, DX + JNE LBB8_15 + MOVL R14, CX + ANDL $3, CX + CMPL CX, $1 + JE LBB8_1 + +LBB8_15: + LONG $0x526ace69; WORD $0x0003 // imull $217706, %esi, %ecx + SARL $16, CX + ADDL $1087, CX + MOVLQSX CX, SI + SUBQ R10, SI + MOVL R14, DX + ANDL $1, DX + ADDQ R14, DX + MOVQ DX, CX + SHRQ $54, CX + ADDQ SI, AX + CMPQ CX, $1 + SBBQ $0, AX + LEAQ -1(AX), SI + CMPQ SI, $2045 + JBE LBB8_17 LBB8_1: XORL AX, AX -LBB8_17: +LBB8_18: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET -LBB8_16: +LBB8_17: CMPQ CX, $1 MOVB $2, CX SBBB $0, CX @@ -3001,7 +3263,7 @@ LBB8_16: LONG $0xc1450f48 // cmovneq %rcx, %rax MOVQ AX, 0(R8) MOVB $1, AX - JMP LBB8_17 + JMP LBB8_18 _decimal_to_f64: BYTE $0x55 // pushq %rbp @@ -3013,321 +3275,259 @@ _decimal_to_f64: BYTE $0x53 // pushq %rbx BYTE $0x50 // pushq %rax MOVQ SI, BX - MOVQ DI, R12 + MOVQ DI, R15 MOVQ $4503599627370496, R13 CMPL 16(DI), $0 JE LBB9_4 MOVQ $9218868437227405312, R14 - MOVL 20(R12), AX - XORL R15, R15 + MOVL 20(R15), AX + XORL R12, R12 CMPL AX, $310 - JG LBB9_78 + JG LBB9_64 CMPL AX, $-330 JGE LBB9_5 XORL R14, R14 - JMP LBB9_78 + JMP LBB9_64 LBB9_4: XORL R14, R14 - XORL R15, R15 - JMP LBB9_78 + XORL R12, R12 + JMP LBB9_64 LBB9_5: TESTL AX, AX MOVQ BX, -48(BP) - JLE LBB9_22 - XORL R15, R15 - JMP LBB9_9 + JLE LBB9_12 + XORL R12, R12 + LONG $0x04358d4c; WORD $0x005b; BYTE $0x00 // leaq $23300(%rip), %r14 /* _POW_TAB(%rip) */ + JMP LBB9_8 -LBB9_7: - NEGL BX - MOVQ R12, DI +LBB9_10: + MOVL AX, AX + MOVL 0(R14)(AX*4), BX + CMPL 16(R15), $0 + JE LBB9_7 + +LBB9_11: + MOVQ R15, DI MOVL BX, SI - LONG $0x002f0ae8; BYTE $0x00 // callq _right_shift + LONG $0x002df1e8; BYTE $0x00 // callq _right_shift -LBB9_8: - ADDL R14, R15 - MOVL 20(R12), AX +LBB9_7: + ADDL BX, R12 + MOVL 20(R15), AX TESTL AX, AX - JLE LBB9_22 + JLE LBB9_12 -LBB9_9: - MOVL $27, R14 +LBB9_8: CMPL AX, $8 - JG LBB9_11 - MOVL AX, AX - LONG $0xbb0d8d48; WORD $0x005b; BYTE $0x00 // leaq $23483(%rip), %rcx /* _POW_TAB(%rip) */ - MOVL 0(CX)(AX*4), R14 - -LBB9_11: - TESTL R14, R14 - JE LBB9_8 - CMPL 16(R12), $0 - JE LBB9_8 - MOVL R14, BX - NEGL BX - TESTL R14, R14 - JS LBB9_16 - CMPL R14, $61 - JL LBB9_7 - -LBB9_15: - MOVQ R12, DI - MOVL $60, SI - LONG 
$0x002eb8e8; BYTE $0x00 // callq _right_shift - LEAL 60(BX), AX - CMPL BX, $-120 - MOVL AX, BX - JL LBB9_15 + JLE LBB9_10 + MOVL $27, BX + CMPL 16(R15), $0 + JNE LBB9_11 JMP LBB9_7 -LBB9_16: - CMPL R14, $-61 - JG LBB9_18 - -LBB9_17: - MOVQ R12, DI - MOVL $60, SI - LONG $0x002d2ae8; BYTE $0x00 // callq _left_shift - LEAL -60(BX), SI - CMPL BX, $120 - MOVL SI, BX - JG LBB9_17 - JMP LBB9_19 +LBB9_12: + LONG $0xc6358d4c; WORD $0x005a; BYTE $0x00 // leaq $23238(%rip), %r14 /* _POW_TAB(%rip) */ + JMP LBB9_14 LBB9_18: - MOVL BX, SI - -LBB9_19: - MOVQ R12, DI - LONG $0x002d14e8; BYTE $0x00 // callq _left_shift - JMP LBB9_8 + MOVL $27, BX + CMPL 16(R15), $0 + JE LBB9_13 LBB9_20: - MOVQ R12, DI - LONG $0x002d07e8; BYTE $0x00 // callq _left_shift + MOVQ R15, DI + MOVL BX, SI + LONG $0x002c15e8; BYTE $0x00 // callq _left_shift + MOVL 20(R15), AX -LBB9_21: - SUBL R14, R15 - MOVL 20(R12), AX +LBB9_13: + SUBL BX, R12 -LBB9_22: +LBB9_14: TESTL AX, AX - JS LBB9_25 - JNE LBB9_36 - MOVQ 0(R12), CX + JS LBB9_17 + JNE LBB9_21 + MOVQ 0(R15), CX CMPB 0(CX), $53 - JL LBB9_26 - JMP LBB9_36 + JL LBB9_19 + JMP LBB9_21 -LBB9_25: - MOVL $27, R14 +LBB9_17: CMPL AX, $-8 - JL LBB9_27 - -LBB9_26: - NEGL AX - WORD $0x9848 // cltq - LONG $0x150d8d48; WORD $0x005b; BYTE $0x00 // leaq $23317(%rip), %rcx /* _POW_TAB(%rip) */ - MOVL 0(CX)(AX*4), R14 - -LBB9_27: - TESTL R14, R14 - JE LBB9_21 - CMPL 16(R12), $0 - JE LBB9_21 - TESTL R14, R14 - JLE LBB9_33 - MOVL R14, SI - CMPL R14, $61 - JL LBB9_20 - MOVL R14, BX - -LBB9_32: - MOVQ R12, DI - MOVL $60, SI - LONG $0x002ca2e8; BYTE $0x00 // callq _left_shift - LEAL -60(BX), SI - CMPL BX, $120 - MOVL SI, BX - JG LBB9_32 - JMP LBB9_20 - -LBB9_33: - MOVL R14, BX - CMPL R14, $-61 - JG LBB9_35 - -LBB9_34: - MOVQ R12, DI - MOVL $60, SI - LONG $0x002defe8; BYTE $0x00 // callq _right_shift - LEAL 60(BX), AX - CMPL BX, $-120 - MOVL AX, BX - JL LBB9_34 + JL LBB9_18 -LBB9_35: - NEGL BX - MOVQ R12, DI - MOVL BX, SI - LONG $0x002dd9e8; BYTE $0x00 // callq _right_shift - JMP LBB9_21 +LBB9_19: + MOVL AX, CX + NEGL CX + MOVL 0(R14)(CX*4), BX + CMPL 16(R15), $0 + JNE LBB9_20 + JMP LBB9_13 -LBB9_36: - CMPL R15, $-1022 - JG LBB9_42 - CMPL 16(R12), $0 +LBB9_21: + CMPL R12, $-1022 + JG LBB9_27 + CMPL 16(R15), $0 MOVQ -48(BP), BX - JE LBB9_44 - CMPL R15, $-1082 - JG LBB9_45 - ADDL $961, R15 + JE LBB9_29 + CMPL R12, $-1082 + JG LBB9_30 + ADDL $961, R12 -LBB9_40: - MOVQ R12, DI +LBB9_25: + MOVQ R15, DI MOVL $60, SI - LONG $0x002da2e8; BYTE $0x00 // callq _right_shift - ADDL $60, R15 - CMPL R15, $-120 - JL LBB9_40 - ADDL $60, R15 - JMP LBB9_46 - -LBB9_42: - CMPL R15, $1024 + LONG $0x002d56e8; BYTE $0x00 // callq _right_shift + ADDL $60, R12 + CMPL R12, $-120 + JL LBB9_25 + ADDL $60, R12 + JMP LBB9_31 + +LBB9_27: + CMPL R12, $1024 MOVQ -48(BP), BX - JG LBB9_75 - DECL R15 - MOVL R15, R14 - JMP LBB9_47 + JG LBB9_61 + ADDL $-1, R12 + MOVL R12, R14 + JMP LBB9_32 -LBB9_44: +LBB9_29: MOVL $-1022, R14 - JMP LBB9_49 + JMP LBB9_34 -LBB9_45: - ADDL $1021, R15 +LBB9_30: + ADDL $1021, R12 -LBB9_46: - NEGL R15 - MOVQ R12, DI - MOVL R15, SI - LONG $0x002d5ce8; BYTE $0x00 // callq _right_shift +LBB9_31: + NEGL R12 + MOVQ R15, DI + MOVL R12, SI + LONG $0x002d0fe8; BYTE $0x00 // callq _right_shift MOVL $-1022, R14 -LBB9_47: - CMPL 16(R12), $0 - JE LBB9_49 - MOVQ R12, DI +LBB9_32: + CMPL 16(R15), $0 + JE LBB9_34 + MOVQ R15, DI MOVL $53, SI - LONG $0x002bd2e8; BYTE $0x00 // callq _left_shift + LONG $0x002b56e8; BYTE $0x00 // callq _left_shift -LBB9_49: - MOVLQSX 20(R12), R8 - MOVQ $-1, R15 - CMPQ R8, $20 - JG LBB9_77 - MOVL R8, 
CX - TESTL CX, CX - JLE LBB9_54 - MOVLQSX 16(R12), SI - XORL DX, DX - XORL R15, R15 +LBB9_34: + MOVL 20(R15), AX + MOVQ $-1, R12 + CMPL AX, $20 + JG LBB9_63 + TESTL AX, AX + JLE LBB9_40 + MOVL 16(R15), DX + XORL SI, SI + TESTL DX, DX + WORD $0x4e0f; BYTE $0xd6 // cmovlel %esi, %edx + LEAQ -1(AX), R9 + CMPQ R9, DX + LONG $0xca430f4c // cmovaeq %rdx, %r9 + LEAL 1(R9), R8 + XORL R12, R12 -LBB9_52: +LBB9_37: CMPQ DX, SI - JGE LBB9_55 - LEAQ 0(R15)(R15*4), AX - MOVQ 0(R12), DI - MOVBQSX 0(DI)(DX*1), DI - LEAQ -48(DI)(AX*2), R15 - INCQ DX - CMPQ CX, DX - JNE LBB9_52 - JMP LBB9_55 - -LBB9_54: - XORL DX, DX - XORL R15, R15 + JE LBB9_41 + LEAQ 0(R12)(R12*4), DI + MOVQ 0(R15), CX + MOVBQSX 0(CX)(SI*1), CX + LEAQ 0(CX)(DI*2), R12 + ADDQ $-48, R12 + ADDQ $1, SI + CMPQ AX, SI + JNE LBB9_37 + MOVL R8, R9 + JMP LBB9_41 -LBB9_55: - CMPL R8, DX - JLE LBB9_63 - MOVL CX, DI - SUBL DX, DI - MOVL DX, SI - NOTL SI - ADDL R8, SI - ANDL $7, DI - JE LBB9_60 - NEGL DI - XORL AX, AX +LBB9_40: + XORL R9, R9 + XORL R12, R12 -LBB9_58: - ADDQ R15, R15 - LEAQ 0(R15)(R15*4), R15 - DECL AX - CMPL DI, AX - JNE LBB9_58 - SUBL AX, DX - -LBB9_60: - CMPL SI, $7 - JB LBB9_63 - MOVL CX, AX - SUBL DX, AX +LBB9_41: + CMPL AX, R9 + JLE LBB9_49 + MOVL AX, SI + SUBL R9, SI + MOVL R9, DX + NOTL DX + ADDL AX, DX + ANDL $7, SI + JE LBB9_46 + NEGL SI + XORL DI, DI -LBB9_62: - IMUL3Q $100000000, R15, R15 - ADDL $-8, AX - JNE LBB9_62 +LBB9_44: + ADDQ R12, R12 + LEAQ 0(R12)(R12*4), R12 + ADDL $-1, DI + CMPL SI, DI + JNE LBB9_44 + SUBL DI, R9 -LBB9_63: - TESTL CX, CX - JS LBB9_71 - MOVL 16(R12), DX - CMPL DX, R8 - JLE LBB9_71 - MOVQ 0(R12), SI - MOVB 0(SI)(CX*1), AX - LEAL 1(CX), DI - CMPL DI, DX - JNE LBB9_72 - CMPB AX, $53 - JNE LBB9_72 - CMPL 28(R12), $0 - SETNE DX - JNE LBB9_73 - TESTL CX, CX - JLE LBB9_73 - MOVB -1(R8)(SI*1), DX - ANDB $1, DX - JMP LBB9_73 +LBB9_46: + CMPL DX, $7 + JB LBB9_49 + MOVL AX, DX + SUBL R9, DX -LBB9_71: - XORL DX, DX +LBB9_48: + IMUL3Q $100000000, R12, R12 + ADDL $-8, DX + JNE LBB9_48 + +LBB9_49: + TESTL AX, AX + JS LBB9_57 + MOVL 16(R15), CX + CMPL CX, AX + JLE LBB9_57 + MOVQ 0(R15), SI + MOVB 0(SI)(AX*1), DX + CMPB DX, $53 + JNE LBB9_58 + LEAL 1(AX), DI + CMPL DI, CX + JNE LBB9_58 + CMPL 28(R15), $0 + SETNE CX + JNE LBB9_59 + TESTL AX, AX + JLE LBB9_59 + ADDL $-1, AX + MOVB 0(SI)(AX*1), CX + ANDB $1, CX + JMP LBB9_59 + +LBB9_57: + XORL CX, CX -LBB9_73: - MOVBLZX DX, AX - ADDQ AX, R15 +LBB9_59: + MOVBLZX CX, AX + ADDQ AX, R12 MOVQ $9007199254740992, AX - CMPQ R15, AX - JNE LBB9_77 + CMPQ R12, AX + JNE LBB9_63 CMPL R14, $1022 - JLE LBB9_76 + JLE LBB9_62 -LBB9_75: - XORL R15, R15 +LBB9_61: + XORL R12, R12 MOVQ $9218868437227405312, R14 - JMP LBB9_78 + JMP LBB9_64 -LBB9_76: - INCL R14 - MOVQ R13, R15 +LBB9_62: + ADDL $1, R14 + MOVQ R13, R12 -LBB9_77: - MOVQ R15, AX +LBB9_63: + MOVQ R12, AX ANDQ R13, AX ADDL $1023, R14 ANDL $2047, R14 @@ -3335,13 +3535,13 @@ LBB9_77: TESTQ AX, AX LONG $0xf0440f4c // cmoveq %rax, %r14 -LBB9_78: - DECQ R13 - ANDQ R15, R13 +LBB9_64: + ADDQ $-1, R13 + ANDQ R12, R13 ORQ R14, R13 MOVQ $-9223372036854775808, AX ORQ R13, AX - CMPL 24(R12), $0 + CMPL 24(R15), $0 LONG $0xc5440f49 // cmoveq %r13, %rax MOVQ AX, 0(BX) XORL AX, AX @@ -3354,10 +3554,10 @@ LBB9_78: BYTE $0x5d // popq %rbp RET -LBB9_72: - CMPB AX, $52 - SETGT DX - JMP LBB9_73 +LBB9_58: + CMPB DX, $53 + SETGE CX + JMP LBB9_59 _atof_native: BYTE $0x55 // pushq %rbp @@ -3379,162 +3579,164 @@ _atof_native: LBB10_4: MOVQ -32(BP), CX MOVB $0, 0(CX)(AX*1) - INCQ AX + ADDQ $1, AX CMPQ -24(BP), AX JA LBB10_4 LBB10_5: LONG $0xc057f8c5 // 
vxorps %xmm0, %xmm0, %xmm0 LONG $0x4511f8c5; BYTE $0xf0 // vmovups %xmm0, $-16(%rbp) + XORL DX, DX CMPB 0(DI), $45 - JNE LBB10_6 + JNE LBB10_7 MOVL $1, -8(BP) MOVL $1, AX CMPQ AX, SI - JL LBB10_9 + JL LBB10_8 + JMP LBB10_39 -LBB10_41: - MOVL $0, -12(BP) - JMP LBB10_40 - -LBB10_6: +LBB10_7: XORL AX, AX CMPQ AX, SI - JGE LBB10_41 + JGE LBB10_39 -LBB10_9: +LBB10_8: MOVB $1, R11 XORL R9, R9 XORL R10, R10 XORL R8, R8 - JMP LBB10_10 + JMP LBB10_12 -LBB10_13: - DECL -12(BP) - XORL R10, R10 +LBB10_20: + MOVL $1, -4(BP) -LBB10_22: - INCQ AX +LBB10_11: + ADDQ $1, AX CMPQ AX, SI SETLT R11 CMPQ SI, AX - JE LBB10_23 + JE LBB10_22 -LBB10_10: +LBB10_12: MOVBLZX 0(DI)(AX*1), CX LEAL -48(CX), DX CMPB DX, $9 - JA LBB10_19 - TESTL R10, R10 - JNE LBB10_14 + JA LBB10_17 CMPB CX, $48 - JE LBB10_13 - -LBB10_14: - MOVLQSX R9, R10 - CMPQ -24(BP), R10 - JBE LBB10_16 - MOVQ -32(BP), DX - MOVB CX, 0(DX)(R10*1) - MOVL -16(BP), R9 - INCL R9 - MOVL R9, -16(BP) - MOVL R9, R10 - JMP LBB10_22 + JNE LBB10_19 + TESTL R10, R10 + JE LBB10_21 + MOVLQSX R9, R11 + CMPQ -24(BP), R11 + JA LBB10_9 + JMP LBB10_10 -LBB10_19: +LBB10_17: CMPB CX, $46 - JNE LBB10_20 + JNE LBB10_23 MOVL R10, -12(BP) MOVL $1, R8 - JMP LBB10_22 + JMP LBB10_11 -LBB10_16: - CMPB CX, $48 - JNE LBB10_18 - MOVL R9, R10 - JMP LBB10_22 +LBB10_19: + MOVLQSX R10, R11 + CMPQ -24(BP), R11 + JBE LBB10_20 -LBB10_18: - MOVL $1, -4(BP) +LBB10_9: + MOVQ -32(BP), DX + MOVB CX, 0(DX)(R11*1) + MOVL -16(BP), R9 + ADDL $1, R9 + MOVL R9, -16(BP) + +LBB10_10: MOVL R9, R10 - JMP LBB10_22 + JMP LBB10_11 + +LBB10_21: + ADDL $-1, -12(BP) + XORL R10, R10 + JMP LBB10_11 + +LBB10_22: + MOVQ SI, AX LBB10_23: - MOVL SI, CX - MOVQ SI, AX TESTL R8, R8 + JE LBB10_25 + TESTB $1, R11 JNE LBB10_26 + JMP LBB10_40 LBB10_25: - MOVL R9, -12(BP) - -LBB10_26: + MOVL R9, -12(BP) TESTB $1, R11 JE LBB10_40 - MOVB 0(DI)(CX*1), CX - ORB $32, CX - CMPB CX, $101 - JNE LBB10_40 - MOVL AX, DX - MOVB 1(DI)(DX*1), CX - CMPB CX, $45 - JE LBB10_32 - MOVL $1, R8 - CMPB CX, $43 - JNE LBB10_30 - ADDL $2, AX - JMP LBB10_33 -LBB10_20: - MOVQ AX, CX - TESTL R8, R8 - JNE LBB10_26 - JMP LBB10_25 +LBB10_26: + MOVL AX, DX + MOVB 0(DI)(DX*1), CX + ORB $32, CX + CMPB CX, $101 + JNE LBB10_40 + MOVB 1(DI)(DX*1), CX + CMPB CX, $45 + JE LBB10_30 + MOVL $1, R8 + CMPB CX, $43 + JNE LBB10_32 + ADDL $2, AX + JMP LBB10_31 -LBB10_32: +LBB10_30: ADDL $2, AX MOVL $-1, R8 -LBB10_33: +LBB10_31: MOVL AX, DX - MOVLQSX DX, DX - XORL R9, R9 - CMPQ DX, SI - JL LBB10_35 - JMP LBB10_39 + MOVLQSX DX, AX + XORL DX, DX + CMPQ AX, SI + JL LBB10_33 + JMP LBB10_38 -LBB10_30: - INCQ DX - MOVLQSX DX, DX - XORL R9, R9 - CMPQ DX, SI - JGE LBB10_39 +LBB10_32: + ADDQ $1, DX + MOVLQSX DX, AX + XORL DX, DX + CMPQ AX, SI + JGE LBB10_38 -LBB10_35: - XORL R9, R9 +LBB10_33: + XORL DX, DX -LBB10_36: - CMPL R9, $9999 - JG LBB10_39 - MOVBLZX 0(DI)(DX*1), CX - LEAL -48(CX), AX - CMPB AX, $9 - JA LBB10_39 - LEAL 0(R9)(R9*4), AX - LEAL -48(CX)(AX*2), R9 - INCQ DX - CMPQ SI, DX - JNE LBB10_36 +LBB10_34: + MOVBLSX 0(DI)(AX*1), CX + CMPL CX, $48 + JL LBB10_38 + CMPB CX, $57 + JG LBB10_38 + CMPL DX, $9999 + JG LBB10_38 + LEAL 0(DX)(DX*4), DX + LEAL 0(CX)(DX*2), DX + ADDL $-48, DX + ADDQ $1, AX + CMPQ SI, AX + JNE LBB10_34 + +LBB10_38: + IMULL R8, DX + ADDL -12(BP), DX LBB10_39: - IMULL R8, R9 - ADDL R9, -12(BP) + MOVL DX, -12(BP) LBB10_40: LEAQ -32(BP), DI LEAQ -40(BP), SI - LONG $0xfffabde8; BYTE $0xff // callq _decimal_to_f64 + LONG $0xfffb65e8; BYTE $0xff // callq _decimal_to_f64 LONG $0x4510fbc5; BYTE $0xd8 // vmovsd $-40(%rbp), %xmm0 ADDQ $48, SP BYTE $0x5d // popq 
%rbp @@ -3557,11 +3759,11 @@ _value: MOVQ DI, -80(BP) MOVQ SI, -72(BP) LEAQ -48(BP), DX - LONG $0x00054de8; BYTE $0x00 // callq _advance_ns + LONG $0x000554e8; BYTE $0x00 // callq _advance_ns MOVBLSX AX, AX CMPL AX, $125 JA LBB11_11 - LONG $0x020d8d48; WORD $0x0003; BYTE $0x00 // leaq $770(%rip), %rcx /* LJTI11_0(%rip) */ + LONG $0x090d8d48; WORD $0x0003; BYTE $0x00 // leaq $777(%rip), %rcx /* LJTI11_0(%rip) */ MOVLQSX 0(CX)(AX*4), AX ADDQ CX, AX JMP AX @@ -3576,9 +3778,9 @@ LBB11_2: LEAQ -80(BP), DI LEAQ -48(BP), SI MOVQ -56(BP), DX - LONG $0x000b2ce8; BYTE $0x00 // callq _vnumber - MOVQ -48(BP), BX - JMP LBB11_50 + LONG $0x000ae9e8; BYTE $0x00 // callq _vnumber + MOVQ -48(BP), R12 + JMP LBB11_49 LBB11_4: XORL AX, AX @@ -3587,41 +3789,39 @@ LBB11_4: SETEQ AX ADDQ AX, R12 SUBQ AX, BX - JE LBB11_45 + JE LBB11_44 CMPQ R13, BX JAE LBB11_7 MOVB 0(R12), AX ADDB $-48, AX - CMPB AX, $10 - JAE LBB11_47 + CMPB AX, $9 + JA LBB11_46 LBB11_7: MOVQ R12, DI MOVQ BX, SI - LONG $0x00210be8; BYTE $0x00 // callq _do_skip_number + LONG $0x002109e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB11_46 + JS LBB11_45 ADDQ AX, R12 - MOVQ R12, BX - SUBQ R15, BX - MOVQ BX, -48(BP) + SUBQ R15, R12 TESTQ R14, R14 - JLE LBB11_49 + JLE LBB11_48 MOVQ -56(BP), AX MOVQ $8, 0(AX) MOVQ R13, 24(AX) - JMP LBB11_50 + JMP LBB11_49 LBB11_10: MOVQ $1, 0(R14) - MOVQ -48(BP), BX - JMP LBB11_50 + MOVQ -48(BP), R12 + JMP LBB11_49 LBB11_11: MOVQ $-2, 0(R14) - MOVQ -48(BP), BX - DECQ BX - JMP LBB11_50 + MOVQ -48(BP), R12 + ADDQ $-1, R12 + JMP LBB11_49 LBB11_12: MOVQ $-1, -64(BP) @@ -3629,195 +3829,190 @@ LBB11_12: LEAQ -80(BP), DI LEAQ -64(BP), DX MOVQ R15, SI - LONG $0x0006fde8; BYTE $0x00 // callq _advance_string + LONG $0x0006d1e8; BYTE $0x00 // callq _advance_string + MOVQ AX, R12 TESTQ AX, AX - JS LBB11_31 - MOVQ AX, -48(BP) + JS LBB11_33 + MOVQ R12, -48(BP) MOVQ R15, 16(R14) - MOVQ -64(BP), CX - CMPQ CX, AX - MOVQ $-1, DX - LONG $0xd14c0f48 // cmovlq %rcx, %rdx - MOVQ DX, 24(R14) - MOVL $7, CX - MOVQ CX, 0(R14) - MOVQ AX, BX - JMP LBB11_50 + MOVQ -64(BP), AX + CMPQ AX, R12 + MOVQ $-1, CX + LONG $0xc84c0f48 // cmovlq %rax, %rcx + MOVQ CX, 24(R14) + MOVL $7, AX + MOVQ AX, 0(R14) + JMP LBB11_49 LBB11_14: - XORL AX, AX TESTL R12, R12 + MOVQ $-2, AX MOVL $11, CX - JMP LBB11_28 + JMP LBB11_32 LBB11_15: - XORL AX, AX TESTL R12, R12 + MOVQ $-2, AX MOVL $10, CX - JMP LBB11_28 + JMP LBB11_32 LBB11_16: MOVQ $5, 0(R14) - MOVQ -48(BP), BX - JMP LBB11_50 + MOVQ -48(BP), R12 + JMP LBB11_49 LBB11_17: - XORL AX, AX TESTL R12, R12 + MOVQ $-2, AX MOVL $12, CX - JMP LBB11_28 + JMP LBB11_32 LBB11_18: - MOVQ -48(BP), AX + MOVQ -48(BP), R12 LEAQ -4(BX), CX - CMPQ AX, CX - JA LBB11_22 - MOVL 0(R15)(AX*1), DX - CMPL DX, $1702063201 - JNE LBB11_32 - ADDQ $4, AX - MOVQ AX, -48(BP) - MOVL $4, CX - JMP LBB11_40 + MOVQ $-1, AX + CMPQ R12, CX + JA LBB11_25 + MOVL 0(R15)(R12*1), CX + CMPL CX, $1702063201 + JNE LBB11_34 + ADDQ $4, R12 + MOVL $4, AX + JMP LBB11_24 LBB11_21: - MOVQ -48(BP), AX - LEAQ -3(BX), CX - CMPQ AX, CX - JA LBB11_22 - MOVL -1(R15)(AX*1), DX - CMPL DX, $1819047278 - JNE LBB11_36 - ADDQ $3, AX - MOVQ AX, -48(BP) - MOVL $2, CX - JMP LBB11_40 - -LBB11_23: - MOVQ -48(BP), AX + MOVQ -48(BP), R12 LEAQ -3(BX), CX - CMPQ AX, CX - JBE LBB11_24 - -LBB11_22: - MOVQ BX, -48(BP) - MOVQ $-1, CX - MOVQ CX, 0(R14) - JMP LBB11_50 - -LBB11_26: - MOVQ $6, 0(R14) - MOVQ -48(BP), BX - JMP LBB11_50 + MOVQ $-1, AX + CMPQ R12, CX + JA LBB11_25 + MOVL -1(R15)(R12*1), CX + CMPL CX, $1819047278 + JNE LBB11_37 + ADDQ $3, R12 + MOVL $2, AX + JMP LBB11_24 
LBB11_27: - XORL AX, AX - TESTL R12, R12 - MOVL $13, CX - -LBB11_28: - MOVQ $-2, DX - LONG $0xd1480f48 // cmovsq %rcx, %rdx - SETPL AX - MOVQ DX, 0(R14) - MOVQ -48(BP), BX - SUBQ AX, BX - JMP LBB11_50 + MOVQ -48(BP), R12 + LEAQ -3(BX), CX + MOVQ $-1, AX + CMPQ R12, CX + JA LBB11_25 + MOVL -1(R15)(R12*1), CX + CMPL CX, $1702195828 + JNE LBB11_41 + ADDQ $3, R12 + MOVL $3, AX LBB11_24: - MOVL -1(R15)(AX*1), DX - CMPL DX, $1702195828 - JNE LBB11_41 - ADDQ $3, AX - MOVQ AX, -48(BP) - MOVL $3, CX - JMP LBB11_40 + MOVQ R12, BX -LBB11_31: +LBB11_25: MOVQ BX, -48(BP) + MOVQ BX, R12 + +LBB11_26: MOVQ AX, 0(R14) - JMP LBB11_50 + JMP LBB11_49 + +LBB11_30: + MOVQ $6, 0(R14) + MOVQ -48(BP), R12 + JMP LBB11_49 + +LBB11_31: + TESTL R12, R12 + MOVQ $-2, AX + MOVL $13, CX LBB11_32: - MOVQ $-2, CX - CMPB DX, $97 - JNE LBB11_40 - MOVL $1702063201, DX + LONG $0xc8490f48 // cmovnsq %rax, %rcx + MOVQ CX, 0(R14) + SARL $31, R12 + NOTL R12 + MOVLQSX R12, R12 + ADDQ -48(BP), R12 + JMP LBB11_49 + +LBB11_33: + MOVQ BX, -48(BP) + MOVQ R12, 0(R14) + MOVQ BX, R12 + JMP LBB11_49 LBB11_34: - SHRL $8, DX - MOVBLSX 1(R15)(AX*1), SI - INCQ AX - MOVBLZX DX, DI - CMPL DI, SI - JE LBB11_34 - JMP LBB11_39 + MOVQ $-2, AX + CMPB CX, $97 + JNE LBB11_26 + MOVL $1702063201, CX LBB11_36: - DECQ AX - MOVQ AX, -48(BP) - MOVQ $-2, CX - CMPB DX, $110 - JNE LBB11_40 - MOVL $1819047278, DX + SHRL $8, CX + MOVBLSX 1(R15)(R12*1), DX + ADDQ $1, R12 + MOVBLZX CX, SI + CMPL SI, DX + JE LBB11_36 + JMP LBB11_40 + +LBB11_37: + ADDQ $-1, R12 + MOVQ $-2, AX + CMPB CX, $110 + JNE LBB11_26 + MOVL $1819047278, CX -LBB11_38: - SHRL $8, DX - MOVBLSX 1(R15)(AX*1), SI - INCQ AX - MOVBLZX DX, DI - CMPL DI, SI - JE LBB11_38 - JMP LBB11_39 +LBB11_39: + SHRL $8, CX + MOVBLSX 1(R15)(R12*1), DX + ADDQ $1, R12 + MOVBLZX CX, SI + CMPL SI, DX + JE LBB11_39 + JMP LBB11_40 LBB11_41: - DECQ AX - MOVQ AX, -48(BP) - MOVQ $-2, CX - CMPB DX, $116 - JNE LBB11_40 - MOVL $1702195828, DX + ADDQ $-1, R12 + MOVQ $-2, AX + CMPB CX, $116 + JNE LBB11_26 + MOVL $1702195828, CX LBB11_43: - SHRL $8, DX - MOVBLSX 1(R15)(AX*1), SI - INCQ AX - MOVBLZX DX, DI - CMPL DI, SI + SHRL $8, CX + MOVBLSX 1(R15)(R12*1), DX + ADDQ $1, R12 + MOVBLZX CX, SI + CMPL SI, DX JE LBB11_43 -LBB11_39: - MOVQ AX, -48(BP) - LBB11_40: - MOVQ AX, BX - MOVQ CX, 0(R14) - JMP LBB11_50 - -LBB11_45: - SUBQ R15, R12 MOVQ R12, -48(BP) + MOVQ AX, 0(R14) + JMP LBB11_49 + +LBB11_44: MOVQ $-1, R13 - JMP LBB11_48 + JMP LBB11_47 -LBB11_46: +LBB11_45: NOTQ AX ADDQ AX, R12 +LBB11_46: + MOVQ $-2, R13 + LBB11_47: SUBQ R15, R12 MOVQ R12, -48(BP) - MOVQ $-2, R13 LBB11_48: - MOVQ R12, BX - -LBB11_49: MOVQ -56(BP), AX MOVQ R13, 0(AX) -LBB11_50: - MOVQ BX, AX +LBB11_49: + MOVQ R12, AX ADDQ $40, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 @@ -3837,136 +4032,136 @@ LBB11_50: // .set L11_0_set_17, LBB11_17-LJTI11_0 // .set L11_0_set_18, LBB11_18-LJTI11_0 // .set L11_0_set_21, LBB11_21-LJTI11_0 -// .set L11_0_set_23, LBB11_23-LJTI11_0 -// .set L11_0_set_26, LBB11_26-LJTI11_0 // .set L11_0_set_27, LBB11_27-LJTI11_0 +// .set L11_0_set_30, LBB11_30-LJTI11_0 +// .set L11_0_set_31, LBB11_31-LJTI11_0 LJTI11_0: - LONG $0xfffffda2 // .long L11_0_set_10 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // 
.long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdc5 // .long L11_0_set_12 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffe18 // .long L11_0_set_14 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffd07 // .long L11_0_set_2 - LONG $0xfffffe27 // .long L11_0_set_15 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffe36 // .long L11_0_set_16 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffe46 // .long L11_0_set_17 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 
// .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffe55 // .long L11_0_set_18 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffe84 // .long L11_0_set_21 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffeb4 // .long L11_0_set_23 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffed4 // .long L11_0_set_26 - LONG $0xfffffdb2 // .long L11_0_set_11 - LONG $0xfffffee4 // .long L11_0_set_27 + LONG $0xfffffd94 // .long L11_0_set_10 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffdb8 // .long L11_0_set_12 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffe0b // .long L11_0_set_14 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG $0xfffffd00 // .long L11_0_set_2 + LONG 
$0xfffffe1f // .long L11_0_set_15 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffe33 // .long L11_0_set_16 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffe43 // .long L11_0_set_17 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffe57 // .long L11_0_set_18 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffe86 // .long L11_0_set_21 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffeb6 // .long L11_0_set_27 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xfffffef6 // .long L11_0_set_30 + LONG $0xfffffda4 // .long L11_0_set_11 + LONG $0xffffff06 // .long L11_0_set_31 LCPI12_0: QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' @@ -3983,7 +4178,6 @@ LCPI12_3: _advance_ns: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - BYTE $0x53 // pushq %rbx MOVQ 0(DX), R8 CMPQ R8, SI JAE LBB12_6 @@ -3995,14 +4189,14 @@ _advance_ns: ADDB $-9, AX CMPB AX, $1 JBE LBB12_6 - MOVQ R8, R10 + MOVQ R8, AX JMP LBB12_5 LBB12_6: - LEAQ 1(R8), R10 - CMPQ R10, SI + LEAQ 1(R8), AX + CMPQ AX, SI JAE LBB12_10 - MOVB 0(DI)(R10*1), CX + MOVB 0(DI)(AX*1), CX CMPB CX, $13 JE LBB12_10 CMPB CX, $32 @@ -4012,10 +4206,10 @@ LBB12_6: JA LBB12_5 LBB12_10: - LEAQ 2(R8), R10 - CMPQ R10, SI + LEAQ 2(R8), AX + CMPQ AX, SI JAE LBB12_14 - MOVB 0(DI)(R10*1), CX + MOVB 0(DI)(AX*1), CX CMPB CX, $13 JE LBB12_14 CMPB CX, $32 @@ -4025,10 
+4219,10 @@ LBB12_10: JA LBB12_5 LBB12_14: - LEAQ 3(R8), R10 - CMPQ R10, SI + LEAQ 3(R8), AX + CMPQ AX, SI JAE LBB12_18 - MOVB 0(DI)(R10*1), CX + MOVB 0(DI)(AX*1), CX CMPB CX, $13 JE LBB12_18 CMPB CX, $32 @@ -4038,59 +4232,55 @@ LBB12_14: JA LBB12_5 LBB12_18: - LEAQ 4(R8), R9 - CMPQ R9, SI + ADDQ $4, R8 + CMPQ R8, SI JAE LBB12_19 - LEAQ 0(DI)(R9*1), R10 - MOVQ SI, BX - SUBQ R9, BX + LEAQ 0(DI)(R8*1), R9 + MOVQ SI, R11 + SUBQ R8, R11 JE LBB12_27 - MOVL R10, AX + MOVL R9, AX ANDL $15, AX TESTQ AX, AX JE LBB12_27 - LEAQ 0(DI)(R8*1), R10 - MOVQ SI, BX - SUBQ R8, BX - LEAQ -5(BX), R8 - XORL AX, AX + MOVL $5, R10 + SUBQ SI, R10 MOVQ $4294977024, R9 LBB12_23: - MOVBLSX 4(R10)(AX*1), CX + MOVBLSX 0(DI)(R8*1), CX CMPL CX, $32 - JA LBB12_25 + JA LBB12_42 BTQ CX, R9 - JAE LBB12_25 - LEAQ 1(AX), R11 - CMPQ R8, AX + JAE LBB12_42 + LEAQ 0(R10)(R8*1), AX + LEAQ 1(R8), CX + CMPQ AX, $4 JE LBB12_26 - LEAQ 5(R10)(AX*1), CX - ANDL $15, CX - MOVQ R11, AX - TESTQ CX, CX + LEAL 0(DI)(R8*1), AX + ADDL $1, AX + ANDL $15, AX + MOVQ CX, R8 + TESTQ AX, AX JNE LBB12_23 LBB12_26: - LEAQ 4(R11)(R10*1), R10 - SUBQ R11, BX - ADDQ $-4, BX + LEAQ 0(DI)(CX*1), R9 + MOVQ SI, R11 + SUBQ CX, R11 LBB12_27: - CMPQ BX, $16 + CMPQ R11, $16 JB LBB12_33 - LEAQ -16(BX), R8 - MOVQ R8, AX - ANDQ $-16, AX - LEAQ 16(AX)(R10*1), R9 - ANDL $15, R8 - QUAD $0xfffffe92056ffac5 // vmovdqu $-366(%rip), %xmm0 /* LCPI12_0(%rip) */ - QUAD $0xfffffe9a0d6ffac5 // vmovdqu $-358(%rip), %xmm1 /* LCPI12_1(%rip) */ - QUAD $0xfffffea2156ffac5 // vmovdqu $-350(%rip), %xmm2 /* LCPI12_2(%rip) */ - QUAD $0xfffffeaa1d6ffac5 // vmovdqu $-342(%rip), %xmm3 /* LCPI12_3(%rip) */ + MOVQ DI, CX + SUBQ R9, CX + QUAD $0xfffffea7056ffac5 // vmovdqu $-345(%rip), %xmm0 /* LCPI12_0(%rip) */ + QUAD $0xfffffeaf0d6ffac5 // vmovdqu $-337(%rip), %xmm1 /* LCPI12_1(%rip) */ + QUAD $0xfffffeb7156ffac5 // vmovdqu $-329(%rip), %xmm2 /* LCPI12_2(%rip) */ + QUAD $0xfffffebf1d6ffac5 // vmovdqu $-321(%rip), %xmm3 /* LCPI12_3(%rip) */ LBB12_29: - LONG $0x6f79c1c4; BYTE $0x22 // vmovdqa (%r10), %xmm4 + LONG $0x6f79c1c4; BYTE $0x21 // vmovdqa (%r9), %xmm4 LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 @@ -4101,83 +4291,71 @@ LBB12_29: LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax CMPW AX, $-1 JNE LBB12_30 - ADDQ $16, R10 - ADDQ $-16, BX - CMPQ BX, $15 + ADDQ $16, R9 + ADDQ $-16, R11 + ADDQ $-16, CX + CMPQ R11, $15 JA LBB12_29 - MOVQ R8, BX - MOVQ R9, R10 LBB12_33: - TESTQ BX, BX - JE LBB12_42 - LEAQ 0(R10)(BX*1), R8 - INCQ R10 - MOVQ $4294977024, R9 + TESTQ R11, R11 + JE LBB12_40 + LEAQ 0(R9)(R11*1), R8 + XORL CX, CX + MOVQ $4294977024, R10 LBB12_35: - MOVBLSX -1(R10), AX + MOVBLSX 0(R9)(CX*1), AX CMPL AX, $32 JA LBB12_37 - BTQ AX, R9 + BTQ AX, R10 JAE LBB12_37 - DECQ BX - INCQ R10 - TESTQ BX, BX + ADDQ $1, CX + CMPQ R11, CX JNE LBB12_35 - MOVQ R8, R10 + MOVQ R8, R9 -LBB12_42: - SUBQ DI, R10 - CMPQ R10, SI - JB LBB12_5 - JMP LBB12_44 +LBB12_40: + SUBQ DI, R9 + +LBB12_41: + MOVQ R9, R8 + JMP LBB12_42 LBB12_19: - MOVQ R9, 0(DX) - JMP LBB12_44 + MOVQ R8, 0(DX) + JMP LBB12_43 LBB12_30: MOVWLZX AX, AX - SUBQ DI, R10 NOTL AX - BSFL AX, AX - ADDQ AX, R10 - CMPQ R10, SI - JB LBB12_5 - JMP LBB12_44 + BSFL AX, R8 + SUBQ CX, R8 -LBB12_25: - ADDQ AX, R10 - MOVQ DI, AX - NOTQ AX - LEAQ 5(AX)(R10*1), R10 - CMPQ R10, SI - JAE LBB12_44 +LBB12_42: + MOVQ R8, AX + CMPQ R8, SI + JAE LBB12_43 LBB12_5: - LEAQ 1(R10), AX - MOVQ AX, 0(DX) - MOVB 0(DI)(R10*1), AX + LEAQ 1(AX), CX + MOVQ CX, 0(DX) + MOVB 0(DI)(AX*1), AX 
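// Note: the scalar whitespace test above relies on the bitmask 4294977024
// (0x100002600), whose set bits are exactly 9 ('\t'), 10 ('\n'), 13 ('\r')
// and 32 (' '): any byte above 32 is rejected outright, and a single BTQ
// against the mask classifies the rest. The 16-byte loop at LBB12_29 performs
// the same check in parallel with vpcmpeqb against the broadcast constants
// LCPI12_0..LCPI12_3 (LCPI12_0 is 16 spaces; the other three presumably hold
// '\t', '\n' and '\r'), bailing to LBB12_30 when vpmovmskb shows a
// non-whitespace byte in the block.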
MOVBLSX AX, AX - BYTE $0x5b // popq %rbx - BYTE $0x5d // popq %rbp + BYTE $0x5d // popq %rbp RET -LBB12_37: - MOVQ DI, AX - NOTQ AX - ADDQ AX, R10 - CMPQ R10, SI - JB LBB12_5 - -LBB12_44: +LBB12_43: XORL AX, AX MOVBLSX AX, AX - BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET +LBB12_37: + SUBQ DI, R9 + ADDQ CX, R9 + JMP LBB12_41 + _vstring: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp @@ -4234,156 +4412,106 @@ _advance_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $16, SP - MOVQ 8(DI), R12 - SUBQ SI, R12 + BYTE $0x50 // pushq %rax + MOVQ 8(DI), R15 + SUBQ SI, R15 JE LBB14_17 - MOVQ 0(DI), AX - MOVQ AX, -48(BP) - ADDQ AX, SI - MOVQ DX, -56(BP) + MOVQ 0(DI), R9 MOVQ $-1, 0(DX) - CMPQ R12, $64 + CMPQ R15, $64 JB LBB14_18 - MOVL R12, R9 - ANDL $63, R9 - MOVQ $-1, R14 - XORL R15, R15 - QUAD $0xffffff8a056ffac5 // vmovdqu $-118(%rip), %xmm0 /* LCPI14_0(%rip) */ - QUAD $0xffffff920d6ffac5 // vmovdqu $-110(%rip), %xmm1 /* LCPI14_1(%rip) */ - MOVQ -48(BP), DX + MOVQ SI, DI + NOTQ DI + MOVQ $-1, -48(BP) + XORL R14, R14 + QUAD $0xffffff98056ffac5 // vmovdqu $-104(%rip), %xmm0 /* LCPI14_0(%rip) */ + QUAD $0xffffffa00d6ffac5 // vmovdqu $-96(%rip), %xmm1 /* LCPI14_1(%rip) */ LBB14_3: - LONG $0x166ffac5 // vmovdqu (%rsi), %xmm2 - LONG $0x5e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm3 - LONG $0x666ffac5; BYTE $0x20 // vmovdqu $32(%rsi), %xmm4 - LONG $0x6e6ffac5; BYTE $0x30 // vmovdqu $48(%rsi), %xmm5 - LONG $0xf074e9c5 // vpcmpeqb %xmm0, %xmm2, %xmm6 - LONG $0xded7f9c5 // vpmovmskb %xmm6, %ebx - LONG $0xf074e1c5 // vpcmpeqb %xmm0, %xmm3, %xmm6 - LONG $0xced7f9c5 // vpmovmskb %xmm6, %ecx - LONG $0xf074d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm6 - LONG $0xc6d7f9c5 // vpmovmskb %xmm6, %eax - LONG $0xf074d1c5 // vpcmpeqb %xmm0, %xmm5, %xmm6 - LONG $0xeed779c5 // vpmovmskb %xmm6, %r13d - LONG $0xd174e9c5 // vpcmpeqb %xmm1, %xmm2, %xmm2 - LONG $0xfad7f9c5 // vpmovmskb %xmm2, %edi - LONG $0xd174e1c5 // vpcmpeqb %xmm1, %xmm3, %xmm2 - LONG $0xd2d779c5 // vpmovmskb %xmm2, %r10d - LONG $0xd174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm2 - LONG $0xdad779c5 // vpmovmskb %xmm2, %r11d - LONG $0xd174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm2 - LONG $0xc2d779c5 // vpmovmskb %xmm2, %r8d - SHLQ $48, R13 - SHLQ $32, AX - SHLQ $16, CX - ORQ CX, BX - ORQ AX, BX - SHLQ $48, R8 - SHLQ $32, R11 - SHLQ $16, R10 - ORQ R10, DI - ORQ R11, DI - ORQ R8, DI - JE LBB14_5 - CMPQ R14, $-1 - JE LBB14_8 - -LBB14_5: - ORQ R13, BX - MOVQ DI, AX - ORQ R15, AX - JNE LBB14_9 + LONG $0x6f7ac1c4; WORD $0x3114 // vmovdqu (%r9,%rsi), %xmm2 + LONG $0x6f7ac1c4; WORD $0x315c; BYTE $0x10 // vmovdqu $16(%r9,%rsi), %xmm3 + LONG $0x6f7ac1c4; WORD $0x3164; BYTE $0x20 // vmovdqu $32(%r9,%rsi), %xmm4 + LONG $0x6f7ac1c4; WORD $0x316c; BYTE $0x30 // vmovdqu $48(%r9,%rsi), %xmm5 + LONG $0xf074e9c5 // vpcmpeqb %xmm0, %xmm2, %xmm6 + LONG $0xe6d779c5 // vpmovmskb %xmm6, %r12d + LONG $0xf074e1c5 // vpcmpeqb %xmm0, %xmm3, %xmm6 + LONG $0xded7f9c5 // vpmovmskb %xmm6, %ebx + LONG $0xf074d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm6 + LONG $0xc6d7f9c5 // vpmovmskb %xmm6, %eax + LONG $0xf074d1c5 // vpcmpeqb %xmm0, %xmm5, %xmm6 + LONG $0xc6d779c5 // vpmovmskb %xmm6, %r8d + LONG $0xd174e9c5 // vpcmpeqb %xmm1, %xmm2, %xmm2 + LONG $0xead779c5 // vpmovmskb %xmm2, %r13d + LONG $0xd174e1c5 // vpcmpeqb %xmm1, %xmm3, %xmm2 + LONG $0xcad7f9c5 // vpmovmskb %xmm2, %ecx + LONG $0xd174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm2 + LONG $0xd2d779c5 // vpmovmskb %xmm2, %r10d + LONG $0xd174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm2 + LONG $0xdad779c5 // 
vpmovmskb %xmm2, %r11d + SHLQ $48, R8 + SHLQ $32, AX + ORQ R8, AX + SHLQ $16, BX + ORQ AX, BX + ORQ BX, R12 + SHLQ $48, R11 + SHLQ $32, R10 + ORQ R11, R10 + SHLQ $16, CX + ORQ R10, CX + ORQ CX, R13 + JNE LBB14_7 + TESTQ R14, R14 + JNE LBB14_9 + XORL R14, R14 + TESTQ R12, R12 + JNE LBB14_10 LBB14_6: - TESTQ BX, BX - JNE LBB14_15 - -LBB14_7: + ADDQ $-64, R15 + ADDQ $-64, DI ADDQ $64, SI - ADDQ $-64, R12 - CMPQ R12, $63 + CMPQ R15, $63 JA LBB14_3 - JMP LBB14_10 + JMP LBB14_12 -LBB14_8: - MOVQ SI, AX - SUBQ DX, AX - BSFQ DI, R14 - ADDQ AX, R14 - MOVQ -56(BP), AX - MOVQ R14, 0(AX) - ORQ R13, BX - MOVQ DI, AX - ORQ R15, AX - JE LBB14_6 +LBB14_7: + CMPQ -48(BP), $-1 + JNE LBB14_9 + BSFQ R13, AX + ADDQ SI, AX + MOVQ AX, -48(BP) + MOVQ AX, 0(DX) LBB14_9: - MOVQ R15, AX + MOVQ R14, AX NOTQ AX - ANDQ DI, AX + ANDQ R13, AX LEAQ 0(AX)(AX*1), R8 - ORQ R15, R8 + ORQ R14, R8 MOVQ R8, CX NOTQ CX - ANDQ DI, CX - MOVQ $-6148914691236517206, DI - ANDQ DI, CX - XORL R15, R15 + ANDQ R13, CX + MOVQ $-6148914691236517206, BX + ANDQ BX, CX + XORL R14, R14 ADDQ AX, CX - SETCS R15 + SETCS R14 ADDQ CX, CX MOVQ $6148914691236517205, AX XORQ AX, CX ANDQ R8, CX NOTQ CX - ANDQ CX, BX - TESTQ BX, BX - JE LBB14_7 - JMP LBB14_15 + ANDQ CX, R12 + TESTQ R12, R12 + JE LBB14_6 LBB14_10: - MOVQ R9, R12 - CMPQ R12, $32 - JB LBB14_22 + BSFQ R12, AX + SUBQ DI, AX LBB14_11: - LONG $0x066ffac5 // vmovdqu (%rsi), %xmm0 - LONG $0x4e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm1 - QUAD $0xfffffe43156ffac5 // vmovdqu $-445(%rip), %xmm2 /* LCPI14_0(%rip) */ - QUAD $0xfffffe4b1d6ffac5 // vmovdqu $-437(%rip), %xmm3 /* LCPI14_1(%rip) */ - LONG $0xe274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm4 - LONG $0xfcd7f9c5 // vpmovmskb %xmm4, %edi - LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 - LONG $0xdad7f9c5 // vpmovmskb %xmm2, %ebx - LONG $0xc374f9c5 // vpcmpeqb %xmm3, %xmm0, %xmm0 - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - LONG $0xc374f1c5 // vpcmpeqb %xmm3, %xmm1, %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - SHLQ $16, BX - SHLQ $16, CX - ORQ CX, AX - JE LBB14_13 - CMPQ R14, $-1 - JE LBB14_19 - -LBB14_13: - ORQ DI, BX - MOVQ AX, CX - ORQ R15, CX - JNE LBB14_20 - -LBB14_14: - TESTQ BX, BX - JE LBB14_21 - -LBB14_15: - BSFQ BX, AX - SUBQ DX, SI - LEAQ 1(SI)(AX*1), AX - -LBB14_16: - ADDQ $16, SP + ADDQ $8, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -4392,123 +4520,169 @@ LBB14_16: BYTE $0x5d // popq %rbp RET +LBB14_12: + ADDQ R9, SI + CMPQ R15, $32 + JB LBB14_23 + +LBB14_13: + LONG $0x066ffac5 // vmovdqu (%rsi), %xmm0 + LONG $0x4e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm1 + QUAD $0xfffffe4d156ffac5 // vmovdqu $-435(%rip), %xmm2 /* LCPI14_0(%rip) */ + QUAD $0xfffffe551d6ffac5 // vmovdqu $-427(%rip), %xmm3 /* LCPI14_1(%rip) */ + LONG $0xe274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm4 + LONG $0xfcd7f9c5 // vpmovmskb %xmm4, %edi + LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 + LONG $0xcad7f9c5 // vpmovmskb %xmm2, %ecx + LONG $0xc374f9c5 // vpcmpeqb %xmm3, %xmm0, %xmm0 + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + LONG $0xc374f1c5 // vpcmpeqb %xmm3, %xmm1, %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + SHLQ $16, CX + ORQ CX, DI + SHLQ $16, BX + ORQ BX, AX + JNE LBB14_19 + TESTQ R14, R14 + JNE LBB14_21 + XORL R14, R14 + TESTQ DI, DI + JE LBB14_22 + +LBB14_16: + BSFQ DI, AX + SUBQ R9, SI + ADDQ SI, AX + ADDQ $1, AX + JMP LBB14_11 + LBB14_18: - MOVQ $-1, R14 - XORL R15, R15 - MOVQ -48(BP), DX - CMPQ R12, $32 - JAE LBB14_11 - JMP LBB14_22 + ADDQ R9, SI + MOVQ $-1, -48(BP) + XORL R14, R14 + CMPQ R15, $32 + 
JAE LBB14_13 + JMP LBB14_23 LBB14_19: + CMPQ -48(BP), $-1 + JNE LBB14_21 MOVQ SI, CX - SUBQ DX, CX - BSFQ AX, R14 - ADDQ CX, R14 - MOVQ -56(BP), CX - MOVQ R14, 0(CX) - ORQ DI, BX - MOVQ AX, CX - ORQ R15, CX - JE LBB14_14 + SUBQ R9, CX + BSFQ AX, BX + ADDQ CX, BX + MOVQ BX, -48(BP) + MOVQ BX, 0(DX) -LBB14_20: - MOVL R15, CX +LBB14_21: + MOVL R14, CX NOTL CX ANDL AX, CX - LEAL 0(CX)(CX*1), R8 - ORL R15, R8 - MOVL R8, DI - NOTL DI - ANDL AX, DI - ANDL $-1431655766, DI - XORL R15, R15 - ADDL CX, DI - SETCS R15 - ADDL DI, DI - XORL $1431655765, DI - ANDL R8, DI - NOTL DI - ANDL DI, BX - TESTQ BX, BX - JNE LBB14_15 + LEAL 0(R14)(CX*2), R8 + LEAL 0(CX)(CX*1), BX + NOTL BX + ANDL AX, BX + ANDL $-1431655766, BX + XORL R14, R14 + ADDL CX, BX + SETCS R14 + ADDL BX, BX + XORL $1431655765, BX + ANDL R8, BX + NOTL BX + ANDL BX, DI + TESTQ DI, DI + JNE LBB14_16 -LBB14_21: +LBB14_22: ADDQ $32, SI - ADDQ $-32, R12 + ADDQ $-32, R15 -LBB14_22: +LBB14_23: + TESTQ R14, R14 + JNE LBB14_37 TESTQ R15, R15 - JNE LBB14_33 - MOVQ $-1, AX - TESTQ R12, R12 - JE LBB14_16 - -LBB14_24: - MOVQ DX, R9 - NOTQ R9 - MOVQ -56(BP), CX + JE LBB14_36 LBB14_25: - LEAQ 1(SI), DI - MOVBLZX 0(SI), BX - CMPB BX, $34 - JE LBB14_32 - LEAQ -1(R12), R10 - CMPB BX, $92 - JE LBB14_28 - MOVQ R10, R12 - MOVQ DI, SI - TESTQ R10, R10 - JNE LBB14_25 - JMP LBB14_16 + MOVQ R9, DI + NOTQ DI + ADDQ $1, DI -LBB14_28: - TESTQ R10, R10 - JE LBB14_16 - CMPQ R14, $-1 - JNE LBB14_31 - ADDQ R9, DI - MOVQ DI, 0(CX) - MOVQ DI, R14 - -LBB14_31: - ADDQ $2, SI - ADDQ $-2, R12 - MOVQ R12, R10 - MOVQ -48(BP), DX - TESTQ R10, R10 - JNE LBB14_25 - JMP LBB14_16 +LBB14_26: + XORL AX, AX -LBB14_32: - SUBQ DX, DI - MOVQ DI, AX - JMP LBB14_16 +LBB14_27: + MOVQ AX, BX + MOVBLZX 0(SI)(AX*1), CX + CMPB CX, $34 + JE LBB14_35 + CMPB CX, $92 + JE LBB14_30 + LEAQ 1(BX), AX + CMPQ R15, AX + JNE LBB14_27 + JMP LBB14_34 + +LBB14_30: + LEAQ -1(R15), CX + MOVQ $-1, AX + CMPQ CX, BX + JE LBB14_11 + CMPQ -48(BP), $-1 + JNE LBB14_33 + LEAQ 0(DI)(SI*1), CX + ADDQ BX, CX + MOVQ CX, -48(BP) + MOVQ CX, 0(DX) LBB14_33: - TESTQ R12, R12 - JE LBB14_17 - CMPQ R14, $-1 - JNE LBB14_36 - MOVQ -48(BP), R14 - NOTQ R14 - ADDQ SI, R14 - MOVQ -56(BP), AX - MOVQ R14, 0(AX) + ADDQ BX, SI + ADDQ $2, SI + MOVQ R15, CX + SUBQ BX, CX + ADDQ $-2, CX + ADDQ $-2, R15 + CMPQ R15, BX + MOVQ CX, R15 + JNE LBB14_26 + JMP LBB14_11 + +LBB14_34: + MOVQ $-1, AX + CMPB CX, $34 + JNE LBB14_11 + +LBB14_35: + ADDQ BX, SI + ADDQ $1, SI LBB14_36: - INCQ SI - DECQ R12 - MOVQ -48(BP), DX - MOVQ $-1, AX - TESTQ R12, R12 - JNE LBB14_24 - JMP LBB14_16 + SUBQ R9, SI + MOVQ SI, AX + JMP LBB14_11 + +LBB14_37: + TESTQ R15, R15 + JE LBB14_17 + CMPQ -48(BP), $-1 + JNE LBB14_40 + MOVQ R9, AX + NOTQ AX + ADDQ SI, AX + MOVQ AX, -48(BP) + MOVQ AX, 0(DX) + +LBB14_40: + ADDQ $1, SI + ADDQ $-1, R15 + TESTQ R15, R15 + JNE LBB14_25 + JMP LBB14_36 LBB14_17: MOVQ $-1, AX - JMP LBB14_16 + JMP LBB14_11 LCPI15_0: LONG $0x43300000 // .long 1127219200 @@ -4517,14 +4691,14 @@ LCPI15_0: LONG $0x00000000 // .long 0 LCPI15_1: - QUAD $0x4330000000000000 // .quad 4841369599423283200 - QUAD $0x4530000000000000 // .quad 4985484787499139072 + QUAD $0x4330000000000000 // .quad 0x4330000000000000 + QUAD $0x4530000000000000 // .quad 0x4530000000000000 LCPI15_2: - QUAD $0x430c6bf526340000 // .quad 4831355200913801216 + QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 LCPI15_3: - QUAD $0xc30c6bf526340000 // .quad -4392016835940974592 + QUAD $0xc30c6bf526340000 // .quad 0xc30c6bf526340000 _vnumber: BYTE $0x55 // pushq %rbp @@ -4537,7 +4711,6 @@ _vnumber: SUBQ 
$56, SP MOVQ DX, BX MOVQ SI, R14 - MOVQ $0, -48(BP) MOVQ 0(SI), AX MOVQ 0(DI), R15 MOVQ 8(DI), R13 @@ -4551,14 +4724,14 @@ _vnumber: CMPQ AX, R13 JAE LBB15_52 MOVB 0(R15)(AX*1), DI - MOVL $1, R9 + MOVL $1, DX CMPB DI, $45 JNE LBB15_4 - INCQ AX + ADDQ $1, AX CMPQ AX, R13 JAE LBB15_52 MOVB 0(R15)(AX*1), DI - MOVL $-1, R9 + MOVL $-1, DX LBB15_4: LEAL -48(DI), CX @@ -4573,130 +4746,122 @@ LBB15_5: LBB15_6: CMPB DI, $48 JNE LBB15_10 - LEAQ 1(AX), SI + LEAQ 1(AX), R8 CMPQ AX, R13 - JAE LBB15_19 - MOVB 0(R15)(SI*1), DX - ADDB $-46, DX - CMPB DX, $55 - JA LBB15_19 - MOVBLZX DX, R8 - MOVQ $36028797027352577, DX - BTQ R8, DX - JAE LBB15_19 + JAE LBB15_22 + MOVB 0(R15)(R8*1), CX + ADDB $-46, CX + CMPB CX, $55 + JA LBB15_22 + MOVBLZX CX, CX + MOVQ $36028797027352577, SI + BTQ CX, SI + JAE LBB15_22 LBB15_10: + MOVL DX, -44(BP) + MOVB $1, CX + MOVL CX, -56(BP) CMPQ AX, R13 - JAE LBB15_18 - CMPB CX, $9 - JA LBB15_20 - LEAQ -1(R13), R8 + JAE LBB15_21 + MOVL $4294967248, R9 + ADDQ $1, AX XORL CX, CX - XORL SI, SI + XORL R8, R8 XORL R12, R12 -LBB15_13: - CMPL SI, $18 - JG LBB15_15 - MOVBQSX DI, DI +LBB15_12: + CMPL R8, $18 + JG LBB15_14 LEAQ 0(R12)(R12*4), DX - LEAQ -48(DI)(DX*2), R12 - INCL SI - JMP LBB15_16 + MOVBLZX DI, DI + ADDL R9, DI + LEAQ 0(DI)(DX*2), R12 + ADDL $1, R8 + JMP LBB15_15 -LBB15_15: - INCL CX +LBB15_14: + ADDL $1, CX -LBB15_16: - CMPQ R8, AX - JE LBB15_24 - MOVBLZX 1(R15)(AX*1), DI - INCQ AX +LBB15_15: + CMPQ R13, AX + JE LBB15_23 + MOVBLZX 0(R15)(AX*1), DI LEAL -48(DI), DX + ADDQ $1, AX CMPB DX, $10 - JB LBB15_13 - JMP LBB15_21 + JB LBB15_12 + CMPB DI, $46 + JNE LBB15_24 + MOVQ $8, 0(BX) + CMPQ AX, R13 + JAE LBB15_52 + MOVB 0(R15)(AX*1), DX + ADDB $-48, DX + CMPB DX, $10 + JAE LBB15_5 + MOVL $0, -56(BP) + JMP LBB15_25 -LBB15_18: +LBB15_21: XORL CX, CX - XORL SI, SI + XORL R8, R8 XORL R12, R12 JMP LBB15_25 -LBB15_19: - MOVQ SI, 0(R14) +LBB15_22: + MOVQ R8, 0(R14) JMP LBB15_53 -LBB15_20: - XORL R12, R12 - XORL SI, SI - XORL CX, CX - -LBB15_21: - XORL DX, DX - TESTL CX, CX - SETGT DX - MOVL DX, -52(BP) - MOVL $9, R8 - CMPB DI, $46 - JNE LBB15_26 - INCQ AX - MOVQ $8, 0(BX) - CMPQ AX, R13 - JAE LBB15_52 - MOVB 0(R15)(AX*1), DX - ADDB $-48, DX - MOVL $8, R8 - CMPB DX, $10 - JAE LBB15_5 - JMP LBB15_26 +LBB15_23: + MOVQ R13, AX + JMP LBB15_25 LBB15_24: - MOVQ R13, AX + ADDQ $-1, AX LBB15_25: XORL DX, DX TESTL CX, CX SETGT DX - MOVL DX, -52(BP) - MOVL $9, R8 - -LBB15_26: - TESTL CX, CX - JNE LBB15_35 + MOVL DX, -68(BP) TESTQ R12, R12 - JNE LBB15_35 + JNE LBB15_34 + TESTL CX, CX + JNE LBB15_34 CMPQ AX, R13 - JAE LBB15_33 - MOVL AX, DI - SUBL R13, DI - XORL SI, SI + JAE LBB15_32 + MOVL AX, SI + SUBL R13, SI + XORL R8, R8 XORL CX, CX -LBB15_30: - CMPB 0(R15)(AX*1), $48 - JNE LBB15_34 - INCQ AX - DECL CX - CMPQ R13, AX - JNE LBB15_30 - XORL R12, R12 - CMPL R8, $9 - JE LBB15_55 - JMP LBB15_59 +LBB15_29: + CMPB 0(R15)(AX*1), $48 + JNE LBB15_33 + ADDQ $1, AX + ADDL $-1, CX + CMPQ R13, AX + JNE LBB15_29 + XORL R12, R12 + MOVL -56(BP), AX + TESTB AX, AX + JNE LBB15_55 + JMP LBB15_60 -LBB15_33: +LBB15_32: XORL CX, CX - XORL SI, SI + XORL R8, R8 -LBB15_34: +LBB15_33: XORL R12, R12 -LBB15_35: +LBB15_34: CMPQ AX, R13 JAE LBB15_40 - CMPL SI, $18 + CMPL R8, $18 JG LBB15_40 + MOVL $4294967248, R9 LBB15_37: MOVBLZX 0(R15)(AX*1), DI @@ -4704,38 +4869,39 @@ LBB15_37: CMPB DX, $9 JA LBB15_40 LEAQ 0(R12)(R12*4), DX - LEAQ -48(DI)(DX*2), R12 - DECL CX - INCQ AX + ADDL R9, DI + LEAQ 0(DI)(DX*2), R12 + ADDL $-1, CX + ADDQ $1, AX CMPQ AX, R13 JAE LBB15_40 - LEAL 1(SI), DX - CMPL SI, $18 - MOVL DX, SI + LEAL 1(R8), DX + 
CMPL R8, $18 + MOVL DX, R8 JL LBB15_37 LBB15_40: CMPQ AX, R13 JAE LBB15_54 - MOVB 0(R15)(AX*1), SI - LEAL -48(SI), DX - CMPB DX, $9 + MOVB 0(R15)(AX*1), DX + LEAL -48(DX), SI + CMPB SI, $9 JA LBB15_46 - LEAQ -1(R13), DI + LEAQ -1(R13), SI LBB15_43: - CMPQ DI, AX - JE LBB15_76 - MOVBLZX 1(R15)(AX*1), SI - INCQ AX - LEAL -48(SI), DX - CMPB DX, $9 + CMPQ SI, AX + JE LBB15_59 + MOVBLZX 1(R15)(AX*1), DX + LEAL -48(DX), DI + ADDQ $1, AX + CMPB DI, $9 JBE LBB15_43 - MOVL $1, -52(BP) + MOVL $1, -68(BP) LBB15_46: - ORB $32, SI - CMPB SI, $101 + ORB $32, DX + CMPB DX, $101 JNE LBB15_54 LEAQ 1(AX), DI MOVQ $8, 0(BX) @@ -4746,7 +4912,7 @@ LBB15_46: JE LBB15_50 MOVL $1, R8 CMPB SI, $43 - JNE LBB15_85 + JNE LBB15_87 LBB15_50: ADDQ $2, AX @@ -4755,9 +4921,10 @@ LBB15_50: XORL DX, DX CMPB SI, $43 SETEQ DX - LEAL -1(DX)(DX*1), R8 + LEAL 0(DX)(DX*1), R8 + ADDL $-1, R8 MOVB 0(R15)(AX*1), SI - JMP LBB15_86 + JMP LBB15_88 LBB15_52: MOVQ R13, 0(R14) @@ -4774,147 +4941,42 @@ LBB15_53: RET LBB15_54: - MOVL CX, DI - MOVQ AX, R13 - CMPL R8, $9 - JNE LBB15_59 + MOVL CX, SI + MOVQ AX, R13 + MOVL -56(BP), AX + TESTB AX, AX + JE LBB15_60 LBB15_55: - TESTL DI, DI + TESTL SI, SI + MOVL -44(BP), DX JNE LBB15_58 MOVQ $-9223372036854775808, AX - MOVLQSX R9, CX + MOVLQSX DX, CX TESTQ R12, R12 - JNS LBB15_80 - MOVQ R12, DX - ANDQ CX, DX - CMPQ DX, AX - JE LBB15_80 + JNS LBB15_69 + MOVQ R12, DI + ANDQ CX, DI + CMPQ DI, AX + JE LBB15_69 LBB15_58: MOVQ $8, 0(BX) + JMP LBB15_61 LBB15_59: - MOVQ $0, -64(BP) - LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 - QUAD $0xfffffcf60562f9c5 // vpunpckldq $-778(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ - QUAD $0xfffffcfe055cf9c5 // vsubpd $-770(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ - LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 - LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 - LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) - MOVQ R12, AX - SHRQ $52, AX - JE LBB15_71 - -LBB15_60: - MOVQ R11, -80(BP) - MOVQ R10, -88(BP) - LEAQ -48(BP), CX - MOVQ DI, SI - MOVQ R12, DI - MOVQ SI, -72(BP) - MOVL R9, DX - MOVL R9, -56(BP) - LONG $0xffeab1e8; BYTE $0xff // callq _atof_eisel_lemire64 - TESTB AX, AX - JE LBB15_64 - MOVQ -72(BP), SI - MOVL -56(BP), DX - CMPL -52(BP), $0 - JE LBB15_79 - INCQ R12 - LEAQ -64(BP), CX - MOVQ R12, DI - LONG $0xffea8de8; BYTE $0xff // callq _atof_eisel_lemire64 + MOVL $1, -68(BP) + MOVL CX, SI + MOVL -56(BP), AX TESTB AX, AX - JE LBB15_64 - LONG $0x4d10fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm1 - LONG $0x4510fbc5; BYTE $0xd0 // vmovsd $-48(%rbp), %xmm0 - LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JNE LBB15_64 - JNP LBB15_66 - -LBB15_64: - MOVQ 0(R14), AX - ADDQ AX, R15 - MOVQ R13, SI - SUBQ AX, SI - MOVQ R15, DI - MOVQ -88(BP), DX - MOVQ -80(BP), CX - LONG $0xffef39e8; BYTE $0xff // callq _atof_native - -LBB15_65: - LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) - -LBB15_66: - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - -LBB15_67: - MOVQ $-9223372036854775808, CX - DECQ CX - ANDQ AX, CX - MOVQ $9218868437227405312, DX - CMPQ CX, DX - JNE LBB15_69 - MOVQ $-8, 0(BX) + JNE LBB15_55 + JMP LBB15_60 LBB15_69: - MOVQ AX, 8(BX) - -LBB15_70: - MOVQ R13, 0(R14) - JMP LBB15_53 - -LBB15_71: - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - MOVL R9, AX - SHRL $31, AX - SHLQ $63, AX - ORQ CX, AX - MOVQ AX, -48(BP) - TESTQ R12, R12 - JE LBB15_67 - TESTL DI, DI - JE LBB15_67 - LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 - LEAL -1(DI), AX - CMPL AX, $36 - JA LBB15_77 - CMPL DI, $23 - JL LBB15_81 - MOVLQSX DI, AX - LONG 
$0x870d8d48; WORD $0x00bc; BYTE $0x00 // leaq $48263(%rip), %rcx /* _P10_TAB(%rip) */ - QUAD $0xffff50c18459fbc5; BYTE $0xff // vmulsd $-176(%rcx,%rax,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) - MOVL $22, AX - JMP LBB15_82 - -LBB15_76: - MOVL $1, -52(BP) - MOVL CX, DI - CMPL R8, $9 - JE LBB15_55 - JMP LBB15_59 - -LBB15_77: - CMPL DI, $-22 - JB LBB15_60 - NEGL DI - MOVLQSX DI, AX - LONG $0x450d8d48; WORD $0x00bc; BYTE $0x00 // leaq $48197(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x045efbc5; BYTE $0xc1 // vdivsd (%rcx,%rax,8), %xmm0, %xmm0 - JMP LBB15_65 - -LBB15_79: - MOVQ -48(BP), AX - JMP LBB15_67 - -LBB15_80: LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 IMULQ CX, R12 - QUAD $0xfffffb790562f9c5 // vpunpckldq $-1159(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ - QUAD $0xfffffb81055cf9c5 // vsubpd $-1151(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ + QUAD $0xfffffcd10562f9c5 // vpunpckldq $-815(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ + QUAD $0xfffffcd9055cf9c5 // vsubpd $-807(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ MOVQ R12, 16(BX) LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 @@ -4922,51 +4984,34 @@ LBB15_80: LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx ORQ AX, CX MOVQ CX, 8(BX) - JMP LBB15_70 - -LBB15_81: - MOVL DI, AX - -LBB15_82: - QUAD $0xfffffb65052ef9c5 // vucomisd $-1179(%rip), %xmm0 /* LCPI15_2(%rip) */ - JA LBB15_60 - QUAD $0xfffffb5f0d10fbc5 // vmovsd $-1185(%rip), %xmm1 /* LCPI15_3(%rip) */ - LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JA LBB15_60 - MOVL AX, AX - LONG $0xcc0d8d48; WORD $0x00bb; BYTE $0x00 // leaq $48076(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 - JMP LBB15_65 + JMP LBB15_86 -LBB15_85: +LBB15_87: MOVQ DI, AX -LBB15_86: +LBB15_88: LEAL -48(SI), DI CMPB DI, $9 JA LBB15_5 - MOVL R9, -56(BP) CMPQ AX, R13 JAE LBB15_93 - CMPB DI, $9 - JA LBB15_93 LEAQ -1(R13), R9 XORL DI, DI -LBB15_90: +LBB15_91: MOVL DI, DX + MOVBLZX SI, SI CMPL DI, $10000 LEAL 0(DX)(DX*4), DI - MOVBLZX SI, SI LEAL -48(SI)(DI*2), DI WORD $0x4d0f; BYTE $0xfa // cmovgel %edx, %edi CMPQ R9, AX JE LBB15_94 MOVBLZX 1(R15)(AX*1), SI - INCQ AX LEAL -48(SI), DX + ADDQ $1, AX CMPB DX, $10 - JB LBB15_90 + JB LBB15_91 JMP LBB15_95 LBB15_93: @@ -4977,48 +5022,172 @@ LBB15_94: MOVQ R13, AX LBB15_95: - IMULL R8, DI - ADDL CX, DI + MOVQ DI, SI + IMULL R8, SI + ADDL CX, SI MOVQ AX, R13 - MOVL -56(BP), R9 - JMP LBB15_59 + +LBB15_60: + MOVL -44(BP), DX + +LBB15_61: + MOVQ $0, -80(BP) + LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 + QUAD $0xfffffc320562f9c5 // vpunpckldq $-974(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ + QUAD $0xfffffc3a055cf9c5 // vsubpd $-966(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ + LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 + LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + MOVQ R12, AX + SHRQ $52, AX + JNE LBB15_74 + LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx + MOVL DX, AX + SHRL $31, AX + SHLQ $63, AX + ORQ CX, AX + MOVQ AX, -64(BP) + TESTL SI, SI + JE LBB15_82 + TESTQ R12, R12 + JE LBB15_82 + LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 + LEAL -1(SI), AX + CMPL AX, $36 + JA LBB15_67 + CMPL SI, $23 + JL LBB15_70 + LEAL -22(SI), AX + LONG $0x780d8d48; WORD $0x00bd; BYTE $0x00 // leaq $48504(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + 
MOVL $22, AX + JMP LBB15_71 + +LBB15_67: + CMPL SI, $-22 + JB LBB15_74 + NEGL SI + LONG $0x59058d48; WORD $0x00bd; BYTE $0x00 // leaq $48473(%rip), %rax /* _P10_TAB(%rip) */ + LONG $0x045efbc5; BYTE $0xf0 // vdivsd (%rax,%rsi,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + JMP LBB15_78 + +LBB15_70: + MOVL SI, AX + +LBB15_71: + QUAD $0xfffffbb7052ef9c5 // vucomisd $-1097(%rip), %xmm0 /* LCPI15_2(%rip) */ + JA LBB15_74 + QUAD $0xfffffbb50d10fbc5 // vmovsd $-1099(%rip), %xmm1 /* LCPI15_3(%rip) */ + LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 + JA LBB15_74 + MOVL AX, AX + LONG $0x2a0d8d48; WORD $0x00bd; BYTE $0x00 // leaq $48426(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + JMP LBB15_78 + +LBB15_74: + MOVQ R11, -96(BP) + MOVQ R10, -56(BP) + LEAQ -64(BP), CX + MOVQ R12, DI + MOVQ SI, -88(BP) + LONG $0xffea47e8; BYTE $0xff // callq _atof_eisel_lemire64 + TESTB AX, AX + JE LBB15_80 + MOVQ -88(BP), SI + CMPL -68(BP), $0 + JE LBB15_81 + ADDQ $1, R12 + LEAQ -80(BP), CX + MOVQ R12, DI + MOVL -44(BP), DX + LONG $0xffea26e8; BYTE $0xff // callq _atof_eisel_lemire64 + TESTB AX, AX + JE LBB15_80 + LONG $0x4d10fbc5; BYTE $0xb0 // vmovsd $-80(%rbp), %xmm1 + LONG $0x4510fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm0 + LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 + JNE LBB15_80 + JP LBB15_80 + +LBB15_78: + LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + JMP LBB15_82 + +LBB15_80: + MOVQ 0(R14), AX + ADDQ AX, R15 + MOVQ R13, SI + SUBQ AX, SI + MOVQ R15, DI + MOVQ -56(BP), DX + MOVQ -96(BP), CX + LONG $0xffee23e8; BYTE $0xff // callq _atof_native + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + JMP LBB15_83 + +LBB15_81: + MOVQ -64(BP), AX + +LBB15_82: + LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 + +LBB15_83: + MOVQ $-9223372036854775808, CX + ADDQ $-1, CX + ANDQ AX, CX + MOVQ $9218868437227405312, AX + CMPQ CX, AX + JNE LBB15_85 + MOVQ $-8, 0(BX) + +LBB15_85: + LONG $0x4311fbc5; BYTE $0x08 // vmovsd %xmm0, $8(%rbx) + +LBB15_86: + MOVQ R13, 0(R14) + JMP LBB15_53 _vsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp BYTE $0x53 // pushq %rbx - MOVQ 0(SI), BX + MOVQ 0(SI), AX MOVQ 0(DI), R8 - MOVQ 8(DI), R10 + MOVQ 8(DI), R11 MOVQ $9, 0(DX) LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4211f8c5; BYTE $0x08 // vmovups %xmm0, $8(%rdx) MOVQ 0(SI), CX MOVQ CX, 24(DX) - CMPQ BX, R10 + CMPQ AX, R11 JAE LBB16_1 - MOVB 0(R8)(BX*1), CX + MOVB 0(R8)(AX*1), CX MOVL $1, R9 CMPB CX, $45 JNE LBB16_5 - INCQ BX - CMPQ BX, R10 + ADDQ $1, AX + CMPQ AX, R11 JAE LBB16_1 - MOVB 0(R8)(BX*1), CX + MOVB 0(R8)(AX*1), CX MOVQ $-1, R9 LBB16_5: LEAL -48(CX), DI CMPB DI, $10 JB LBB16_7 - MOVQ BX, 0(SI) + MOVQ AX, 0(SI) MOVQ $-2, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB16_1: - MOVQ R10, 0(SI) + MOVQ R11, 0(SI) MOVQ $-1, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp @@ -5026,68 +5195,76 @@ LBB16_1: LBB16_7: CMPB CX, $48 - JNE LBB16_8 - LEAQ 1(BX), DI - CMPQ BX, R10 - JAE LBB16_17 + JNE LBB16_12 + LEAQ 1(AX), DI + CMPQ AX, R11 + JAE LBB16_11 MOVB 0(R8)(DI*1), CX ADDB $-46, CX CMPB CX, $55 - JA LBB16_17 - MOVBLZX CX, R11 + JA LBB16_11 + MOVBLZX CX, R10 MOVQ $36028797027352577, CX - BTQ R11, CX - JAE LBB16_17 + BTQ R10, CX + JAE LBB16_11 -LBB16_8: +LBB16_12: + CMPQ AX, R11 + MOVQ R11, R10 + LONG $0xd0470f4c // cmovaq %rax, %r10 XORL DI, DI -LBB16_9: - CMPQ BX, R10 - JAE 
LBB16_22 - MOVBQSX 0(R8)(BX*1), CX - LEAL -48(CX), AX - CMPB AX, $9 +LBB16_13: + CMPQ R10, AX + JE LBB16_23 + MOVBQSX 0(R8)(AX*1), CX + LEAL -48(CX), BX + CMPB BX, $9 JA LBB16_18 IMUL3Q $10, DI, DI - JO LBB16_13 - INCQ BX - ADDQ $-48, CX + JO LBB16_17 + ADDQ $1, AX + ADDL $-48, CX IMULQ R9, CX ADDQ CX, DI - JNO LBB16_9 + JNO LBB16_13 -LBB16_13: - DECQ BX - MOVQ BX, 0(SI) +LBB16_17: + ADDQ $-1, AX + MOVQ AX, 0(SI) MOVQ $-5, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB16_17: +LBB16_11: MOVQ DI, 0(SI) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB16_18: - CMPB CX, $101 - JE LBB16_21 - CMPB CX, $69 - JE LBB16_21 + CMPQ AX, R11 + JAE LBB16_22 CMPB CX, $46 + JE LBB16_25 + CMPB CX, $69 + JE LBB16_25 + CMPB CX, $101 JNE LBB16_22 -LBB16_21: - MOVQ BX, 0(SI) +LBB16_25: + MOVQ AX, 0(SI) MOVQ $-6, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET LBB16_22: - MOVQ BX, 0(SI) + MOVQ AX, R10 + +LBB16_23: + MOVQ R10, 0(SI) MOVQ DI, 16(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp @@ -5096,16 +5273,18 @@ LBB16_22: _vunsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5641 // pushq %r14 + BYTE $0x53 // pushq %rbx MOVQ DX, R8 MOVQ 0(SI), CX MOVQ 0(DI), R9 - MOVQ 8(DI), R11 + MOVQ 8(DI), R14 MOVQ $9, 0(DX) LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4211f8c5; BYTE $0x08 // vmovups %xmm0, $8(%rdx) MOVQ 0(SI), AX MOVQ AX, 24(DX) - CMPQ CX, R11 + CMPQ CX, R14 JAE LBB17_1 MOVB 0(R9)(CX*1), AX CMPB AX, $45 @@ -5114,12 +5293,16 @@ _vunsigned: LBB17_3: MOVQ CX, 0(SI) MOVQ $-6, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET LBB17_1: - MOVQ R11, 0(SI) + MOVQ R14, 0(SI) MOVQ $-1, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET @@ -5129,73 +5312,86 @@ LBB17_4: JB LBB17_6 MOVQ CX, 0(SI) MOVQ $-2, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET LBB17_6: CMPB AX, $48 - JNE LBB17_7 + JNE LBB17_10 MOVB 1(R9)(CX*1), AX ADDB $-46, AX CMPB AX, $55 - JA LBB17_16 + JA LBB17_9 MOVBLZX AX, AX MOVQ $36028797027352577, DX BTQ AX, DX - JAE LBB17_16 + JAE LBB17_9 -LBB17_7: +LBB17_10: + CMPQ R14, CX + MOVQ CX, R10 + LONG $0xd6470f4d // cmovaq %r14, %r10 XORL AX, AX - MOVL $10, R10 + MOVL $10, R11 -LBB17_8: - CMPQ CX, R11 - JAE LBB17_20 - MOVBLSX 0(R9)(CX*1), DI - LEAL -48(DI), DX +LBB17_11: + CMPQ R10, CX + JE LBB17_22 + MOVBLSX 0(R9)(CX*1), BX + LEAL -48(BX), DX CMPB DX, $9 JA LBB17_17 - MULQ R10 - JO LBB17_13 - INCQ CX - ADDL $-48, DI - MOVLQSX DI, DX - MOVQ DX, DI - SARQ $63, DI - ADDQ DX, AX - ADCQ $0, DI - MOVL DI, DX - ANDL $1, DX + MULQ R11 + JO LBB17_16 + ADDQ $1, CX + ADDL $-48, BX + XORL DI, DI + ADDQ BX, AX + SETCS DI + MOVQ DI, DX NEGQ DX XORQ DX, DI - JNE LBB17_13 + JNE LBB17_16 TESTQ DX, DX - JNS LBB17_8 + JNS LBB17_11 -LBB17_13: - DECQ CX +LBB17_16: + ADDQ $-1, CX MOVQ CX, 0(SI) MOVQ $-5, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET LBB17_17: - CMPB DI, $101 + CMPQ CX, R14 + JAE LBB17_21 + CMPB BX, $46 JE LBB17_3 - CMPB DI, $69 + CMPB BX, $69 JE LBB17_3 - CMPB DI, $46 + CMPB BX, $101 JE LBB17_3 -LBB17_20: - MOVQ CX, 0(SI) +LBB17_21: + MOVQ CX, R10 + +LBB17_22: + MOVQ R10, 0(SI) MOVQ AX, 16(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB17_16: - INCQ CX +LBB17_9: + ADDQ $1, CX MOVQ CX, 0(SI) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET @@ -5220,570 +5416,560 @@ _fsm_exec: WORD $0x5441 // pushq 
%r12 BYTE $0x53 // pushq %rbx SUBQ $40, SP + MOVL CX, -60(BP) CMPL 0(DI), $0 JE LBB19_2 MOVQ DI, R12 - MOVL CX, -60(BP) - MOVQ SI, -48(BP) - MOVQ DX, -56(BP) + MOVQ SI, -56(BP) + MOVQ DX, -48(BP) MOVQ $-1, R14 - JMP LBB19_4 + JMP LBB19_6 LBB19_2: MOVQ $-1, R13 - JMP LBB19_68 + JMP LBB19_71 LBB19_3: LEAQ 3(AX), CX - MOVQ CX, 0(BX) + MOVQ -48(BP), DX + MOVQ CX, 0(DX) TESTQ AX, AX - JLE LBB19_64 + JLE LBB19_71 -LBB19_40: +LBB19_4: MOVL 0(R12), CX MOVQ R14, R13 TESTL CX, CX - JE LBB19_68 + JE LBB19_71 -LBB19_4: - MOVQ -48(BP), R13 - MOVQ 0(R13), DI - MOVQ 8(R13), SI +LBB19_6: MOVQ -56(BP), BX - MOVQ BX, DX - LONG $0xfff220e8; BYTE $0xff // callq _advance_ns + MOVQ 0(BX), DI + MOVQ 8(BX), SI + MOVQ -48(BP), R13 + MOVQ R13, DX + LONG $0xfff23ae8; BYTE $0xff // callq _advance_ns MOVLQSX 0(R12), DX LEAQ -1(DX), CX - MOVL 0(R12)(DX*4), SI CMPQ R14, $-1 - JNE LBB19_6 - MOVQ 0(BX), R14 - DECQ R14 + JNE LBB19_8 + MOVQ 0(R13), R14 + ADDQ $-1, R14 -LBB19_6: - DECL SI +LBB19_8: + MOVL 0(R12)(DX*4), SI + ADDL $-1, SI CMPL SI, $5 - JA LBB19_11 - LONG $0x823d8d48; WORD $0x0004; BYTE $0x00 // leaq $1154(%rip), %rdi /* LJTI19_0(%rip) */ + JA LBB19_13 + LONG $0x773d8d48; WORD $0x0004; BYTE $0x00 // leaq $1143(%rip), %rdi /* LJTI19_0(%rip) */ MOVLQSX 0(DI)(SI*4), SI ADDQ DI, SI JMP SI -LBB19_8: +LBB19_10: MOVBLSX AX, AX CMPL AX, $44 - JE LBB19_29 + JE LBB19_28 CMPL AX, $93 - JNE LBB19_67 - MOVL CX, 0(R12) - MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_68 + JE LBB19_12 + JMP LBB19_66 -LBB19_11: +LBB19_13: MOVL CX, 0(R12) MOVBLSX AX, AX CMPL AX, $123 - JBE LBB19_27 - JMP LBB19_67 + JBE LBB19_24 + JMP LBB19_66 -LBB19_12: +LBB19_14: MOVBLSX AX, AX CMPL AX, $44 - JE LBB19_31 - CMPL AX, $125 - JNE LBB19_67 - MOVL CX, 0(R12) - MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_68 - -LBB19_15: - CMPB AX, $34 - JNE LBB19_67 - MOVL $4, 0(R12)(DX*4) - MOVQ 0(BX), R15 - MOVQ R13, DI - -LBB19_17: + JNE LBB19_15 + CMPL DX, $65535 + JG LBB19_70 + LEAL 1(DX), AX + MOVL AX, 0(R12) + MOVL $3, 4(R12)(DX*4) + JMP LBB19_4 + +LBB19_16: + CMPB AX, $34 + JNE LBB19_66 + MOVL $4, 0(R12)(DX*4) + MOVQ 0(R13), R15 + MOVQ BX, DI MOVQ R15, SI LEAQ -72(BP), DX - LONG $0xfff412e8; BYTE $0xff // callq _advance_string + LONG $0xfff3f6e8; BYTE $0xff // callq _advance_string + MOVQ R13, BX MOVQ AX, R13 TESTQ AX, AX - JS LBB19_59 + JS LBB19_63 + +LBB19_18: MOVQ R13, 0(BX) TESTQ R15, R15 - JG LBB19_40 - JMP LBB19_60 + JG LBB19_4 + JMP LBB19_19 -LBB19_19: +LBB19_20: CMPB AX, $58 - JNE LBB19_67 + JNE LBB19_66 MOVL $0, 0(R12)(DX*4) - JMP LBB19_40 - -LBB19_21: - CMPB AX, $93 - JNE LBB19_26 - MOVL CX, 0(R12) - MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_68 - -LBB19_23: - MOVBLSX AX, AX - CMPL AX, $34 - JE LBB19_33 - CMPL AX, $125 - JNE LBB19_67 - MOVL CX, 0(R12) - MOVQ R14, R13 - TESTL CX, CX - JNE LBB19_4 - JMP LBB19_68 + JMP LBB19_4 -LBB19_26: +LBB19_22: + CMPB AX, $93 + JE LBB19_12 MOVL $1, 0(R12)(DX*4) MOVBLSX AX, AX CMPL AX, $123 - JA LBB19_67 + JA LBB19_66 -LBB19_27: +LBB19_24: MOVQ $-1, R13 - LONG $0x770d8d48; WORD $0x0003; BYTE $0x00 // leaq $887(%rip), %rcx /* LJTI19_1(%rip) */ + LONG $0xa80d8d48; WORD $0x0003; BYTE $0x00 // leaq $936(%rip), %rcx /* LJTI19_1(%rip) */ MOVLQSX 0(CX)(AX*4), AX ADDQ CX, AX JMP AX -LBB19_28: - MOVQ -56(BP), BX +LBB19_27: + MOVQ -48(BP), BX MOVQ 0(BX), R15 LEAQ -1(R15), R13 - MOVQ -48(BP), AX + MOVQ -56(BP), AX MOVQ 0(AX), DI ADDQ R13, DI MOVQ 8(AX), SI SUBQ R13, SI - LONG $0x000cefe8; BYTE $0x00 // callq _do_skip_number - MOVQ $-2, CX - SUBQ AX, CX + LONG $0x000d39e8; BYTE $0x00 // callq 
_do_skip_number + LEAQ -1(AX), CX + MOVQ $-2, DX + SUBQ AX, DX TESTQ AX, AX - LEAQ -1(AX), AX - LONG $0xc1480f48 // cmovsq %rcx, %rax - MOVQ $-2, CX - LONG $0xe9480f4c // cmovsq %rcx, %r13 - ADDQ R15, AX - MOVQ AX, 0(BX) + LONG $0xd1490f48 // cmovnsq %rcx, %rdx + MOVQ $-2, AX + LONG $0xe8480f4c // cmovsq %rax, %r13 + ADDQ R15, DX + MOVQ DX, 0(BX) TESTQ R13, R13 - JNS LBB19_40 - JMP LBB19_68 + JNS LBB19_4 + JMP LBB19_71 -LBB19_29: - CMPL DX, $65535 - JG LBB19_61 - LEAL 1(DX), AX - MOVL AX, 0(R12) - MOVL $0, 4(R12)(DX*4) - JMP LBB19_40 +LBB19_25: + MOVBLSX AX, AX + CMPL AX, $34 + JE LBB19_32 + +LBB19_15: + CMPL AX, $125 + JNE LBB19_66 + +LBB19_12: + MOVL CX, 0(R12) + MOVQ R14, R13 + TESTL CX, CX + JNE LBB19_6 + JMP LBB19_71 -LBB19_31: +LBB19_28: CMPL DX, $65535 - JG LBB19_61 + JG LBB19_70 LEAL 1(DX), AX MOVL AX, 0(R12) - MOVL $3, 4(R12)(DX*4) - JMP LBB19_40 + MOVL $0, 4(R12)(DX*4) + JMP LBB19_4 -LBB19_33: +LBB19_32: MOVL $2, 0(R12)(DX*4) - MOVL -60(BP), AX - CMPL AX, $1 - JE LBB19_37 - TESTL AX, AX - JNE LBB19_38 - MOVQ -56(BP), BX - MOVQ 0(BX), R15 - MOVQ -48(BP), DI + CMPL -60(BP), $0 + JE LBB19_35 + MOVQ BX, DI + MOVQ R13, SI + LONG $0x000597e8; BYTE $0x00 // callq _validate_string + TESTQ AX, AX + JNS LBB19_37 + JMP LBB19_34 + +LBB19_35: + MOVQ 0(R13), R15 + MOVQ BX, DI MOVQ R15, SI LEAQ -72(BP), DX - LONG $0xfff2b2e8; BYTE $0xff // callq _advance_string + LONG $0xfff2bde8; BYTE $0xff // callq _advance_string + MOVQ R13, BX MOVQ AX, R13 TESTQ AX, AX - JS LBB19_59 + JS LBB19_63 MOVQ R13, 0(BX) TESTQ R15, R15 - JG LBB19_38 - JMP LBB19_60 + JLE LBB19_19 LBB19_37: - MOVQ R13, DI - MOVQ BX, SI - LONG $0x000533e8; BYTE $0x00 // callq _validate_string - TESTQ AX, AX - JS LBB19_65 - -LBB19_38: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_61 + JG LBB19_70 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $4, 4(R12)(AX*4) - JMP LBB19_40 + JMP LBB19_4 -LBB19_41: - MOVL -60(BP), AX - CMPL AX, $1 - JE LBB19_57 - TESTL AX, AX - JNE LBB19_40 - MOVQ -56(BP), BX - MOVQ 0(BX), R15 - MOVQ -48(BP), DI - JMP LBB19_17 +LBB19_39: + CMPL -60(BP), $0 + JE LBB19_62 + MOVQ -56(BP), DI + MOVQ -48(BP), SI + LONG $0x000523e8; BYTE $0x00 // callq _validate_string + TESTQ AX, AX + JNS LBB19_4 + JMP LBB19_34 -LBB19_44: - MOVQ -56(BP), BX +LBB19_41: + MOVQ -48(BP), BX MOVQ 0(BX), R13 - MOVQ -48(BP), AX + MOVQ -56(BP), AX MOVQ 0(AX), DI ADDQ R13, DI MOVQ 8(AX), SI SUBQ R13, SI - LONG $0x000bbbe8; BYTE $0x00 // callq _do_skip_number + LONG $0x000c07e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB19_66 + JS LBB19_65 ADDQ R13, AX MOVQ AX, 0(BX) TESTQ R13, R13 - JG LBB19_40 - JMP LBB19_69 + JG LBB19_4 + JMP LBB19_43 -LBB19_46: +LBB19_44: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_61 + JG LBB19_70 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $5, 4(R12)(AX*4) - JMP LBB19_40 + JMP LBB19_4 -LBB19_48: - MOVQ -56(BP), BX - MOVQ 0(BX), AX - MOVQ -48(BP), SI +LBB19_46: + MOVQ -48(BP), AX + MOVQ 0(AX), AX + MOVQ -56(BP), SI MOVQ 8(SI), CX LEAQ -4(CX), DX CMPQ AX, DX - JA LBB19_75 + JA LBB19_64 MOVQ 0(SI), CX MOVL 0(CX)(AX*1), DX CMPL DX, $1702063201 - JNE LBB19_72 + JNE LBB19_67 LEAQ 4(AX), CX - MOVQ CX, 0(BX) + MOVQ -48(BP), DX + MOVQ CX, 0(DX) TESTQ AX, AX - JG LBB19_40 - JMP LBB19_64 + JG LBB19_4 + JMP LBB19_49 -LBB19_51: - MOVQ -56(BP), BX - MOVQ 0(BX), AX - MOVQ -48(BP), SI +LBB19_50: + MOVQ -48(BP), AX + MOVQ 0(AX), AX + MOVQ -56(BP), SI MOVQ 8(SI), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB19_75 + JA LBB19_64 MOVQ 0(SI), CX + LEAQ -1(AX), R13 CMPL -1(CX)(AX*1), $1819047278 JE LBB19_3 - JMP LBB19_76 + JMP LBB19_52 -LBB19_53: - MOVQ 
-56(BP), BX - MOVQ 0(BX), AX - MOVQ -48(BP), SI +LBB19_55: + MOVQ -48(BP), AX + MOVQ 0(AX), AX + MOVQ -56(BP), SI MOVQ 8(SI), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB19_75 + JA LBB19_64 MOVQ 0(SI), CX + LEAQ -1(AX), R13 CMPL -1(CX)(AX*1), $1702195828 JE LBB19_3 - JMP LBB19_80 + JMP LBB19_57 -LBB19_55: +LBB19_60: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB19_61 + JG LBB19_70 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $6, 4(R12)(AX*4) - JMP LBB19_40 + JMP LBB19_4 -LBB19_57: - MOVQ -48(BP), DI - MOVQ -56(BP), SI - LONG $0x0003b1e8; BYTE $0x00 // callq _validate_string +LBB19_62: + MOVQ -48(BP), BX + MOVQ 0(BX), R15 + MOVQ -56(BP), DI + MOVQ R15, SI + LEAQ -72(BP), DX + LONG $0xfff10ee8; BYTE $0xff // callq _advance_string + MOVQ AX, R13 TESTQ AX, AX - JNS LBB19_40 - JMP LBB19_65 - -LBB19_61: - MOVQ $-7, R13 - JMP LBB19_68 + JNS LBB19_18 -LBB19_59: - MOVQ -48(BP), AX +LBB19_63: + MOVQ -56(BP), AX MOVQ 8(AX), AX MOVQ AX, 0(BX) - JMP LBB19_68 + JMP LBB19_71 -LBB19_60: - DECQ R15 +LBB19_70: + MOVQ $-7, R13 + JMP LBB19_71 + +LBB19_19: + ADDQ $-1, R15 MOVQ R15, R13 - JMP LBB19_68 + JMP LBB19_71 -LBB19_75: - MOVQ CX, 0(BX) - JMP LBB19_68 +LBB19_34: + MOVQ AX, R13 + JMP LBB19_71 LBB19_64: - DECQ AX + MOVQ -48(BP), AX + MOVQ CX, 0(AX) + JMP LBB19_71 LBB19_65: - MOVQ AX, R13 - JMP LBB19_68 - -LBB19_66: NOTQ AX ADDQ AX, R13 MOVQ R13, 0(BX) + JMP LBB19_66 -LBB19_67: - MOVQ $-2, R13 - -LBB19_68: - MOVQ R13, AX - ADDQ $40, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - RET - -LBB19_69: - DECQ R13 - JMP LBB19_68 +LBB19_43: + ADDQ $-1, R13 + JMP LBB19_71 -LBB19_72: +LBB19_67: MOVQ $-2, R13 CMPB DX, $97 - JNE LBB19_68 - INCQ AX + JNE LBB19_71 + ADDQ $1, AX MOVL $1702063201, DX + MOVQ -48(BP), BX -LBB19_74: +LBB19_69: SHRL $8, DX MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - INCQ AX + ADDQ $1, AX CMPL DI, SI - JE LBB19_74 - JMP LBB19_68 + JE LBB19_69 + JMP LBB19_71 -LBB19_76: - LEAQ -1(AX), DX - MOVQ DX, 0(BX) - MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $110 - JNE LBB19_68 +LBB19_52: + MOVQ -48(BP), BX + MOVQ R13, 0(BX) + CMPB 0(CX)(R13*1), $110 + JNE LBB19_66 MOVL $1819047278, DX -LBB19_78: +LBB19_54: SHRL $8, DX MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - INCQ AX + ADDQ $1, AX CMPL DI, SI - JE LBB19_78 - JMP LBB19_68 + JE LBB19_54 + JMP LBB19_66 -LBB19_80: - LEAQ -1(AX), DX - MOVQ DX, 0(BX) - MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $116 - JNE LBB19_68 +LBB19_49: + ADDQ $-1, AX + MOVQ AX, R13 + JMP LBB19_71 + +LBB19_57: + MOVQ -48(BP), BX + MOVQ R13, 0(BX) + CMPB 0(CX)(R13*1), $116 + JNE LBB19_66 MOVL $1702195828, DX -LBB19_82: +LBB19_59: SHRL $8, DX MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - INCQ AX + ADDQ $1, AX CMPL DI, SI - JE LBB19_82 - JMP LBB19_68 - -// .set L19_0_set_8, LBB19_8-LJTI19_0 -// .set L19_0_set_12, LBB19_12-LJTI19_0 -// .set L19_0_set_15, LBB19_15-LJTI19_0 -// .set L19_0_set_19, LBB19_19-LJTI19_0 -// .set L19_0_set_21, LBB19_21-LJTI19_0 -// .set L19_0_set_23, LBB19_23-LJTI19_0 + JE LBB19_59 + +LBB19_66: + MOVQ $-2, R13 + +LBB19_71: + MOVQ R13, AX + ADDQ $40, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET + +// .set L19_0_set_10, LBB19_10-LJTI19_0 +// .set L19_0_set_14, LBB19_14-LJTI19_0 +// .set L19_0_set_16, LBB19_16-LJTI19_0 +// .set L19_0_set_20, LBB19_20-LJTI19_0 +// .set L19_0_set_22, LBB19_22-LJTI19_0 +// .set L19_0_set_25, 
LBB19_25-LJTI19_0 LJTI19_0: - LONG $0xfffffb87 // .long L19_0_set_8 - LONG $0xfffffbc1 // .long L19_0_set_12 - LONG $0xfffffbea // .long L19_0_set_15 - LONG $0xfffffc29 // .long L19_0_set_19 - LONG $0xfffffc3e // .long L19_0_set_21 - LONG $0xfffffc56 // .long L19_0_set_23 - - // .set L19_1_set_68, LBB19_68-LJTI19_1 - // .set L19_1_set_67, LBB19_67-LJTI19_1 + LONG $0xfffffb92 // .long L19_0_set_10 + LONG $0xfffffbc1 // .long L19_0_set_14 + LONG $0xfffffbee // .long L19_0_set_16 + LONG $0xfffffc31 // .long L19_0_set_20 + LONG $0xfffffc46 // .long L19_0_set_22 + LONG $0xfffffcce // .long L19_0_set_25 + + // .set L19_1_set_71, LBB19_71-LJTI19_1 + // .set L19_1_set_66, LBB19_66-LJTI19_1 + // .set L19_1_set_39, LBB19_39-LJTI19_1 // .set L19_1_set_41, LBB19_41-LJTI19_1 + // .set L19_1_set_27, LBB19_27-LJTI19_1 // .set L19_1_set_44, LBB19_44-LJTI19_1 - // .set L19_1_set_28, LBB19_28-LJTI19_1 // .set L19_1_set_46, LBB19_46-LJTI19_1 - // .set L19_1_set_48, LBB19_48-LJTI19_1 - // .set L19_1_set_51, LBB19_51-LJTI19_1 - // .set L19_1_set_53, LBB19_53-LJTI19_1 + // .set L19_1_set_50, LBB19_50-LJTI19_1 // .set L19_1_set_55, LBB19_55-LJTI19_1 + // .set L19_1_set_60, LBB19_60-LJTI19_1 LJTI19_1: - LONG $0xffffff40 // .long L19_1_set_68 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffda6 // .long L19_1_set_41 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffdca // .long L19_1_set_44 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 // .long L19_1_set_28 - LONG $0xfffffc92 
// .long L19_1_set_28 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffe04 // .long L19_1_set_46 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffe29 // .long L19_1_set_48 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffe6c // .long L19_1_set_51 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffe9e // .long L19_1_set_53 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xffffff39 // .long L19_1_set_67 - LONG $0xfffffecc // .long L19_1_set_55 + LONG $0xffffffd6 // .long L19_1_set_71 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG 
$0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffd72 // .long L19_1_set_39 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffd97 // .long L19_1_set_41 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xfffffc61 // .long L19_1_set_27 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffdd1 // .long L19_1_set_44 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffdf6 // .long L19_1_set_46 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long 
L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffe3d // .long L19_1_set_50 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffe73 // .long L19_1_set_55 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xffffffcf // .long L19_1_set_66 + LONG $0xfffffea9 // .long L19_1_set_60 _skip_array: BYTE $0x55 // pushq %rbp @@ -5823,10 +6009,10 @@ _skip_string: MOVQ 0(SI), BX LEAQ -32(BP), DX MOVQ BX, SI - LONG $0xffedade8; BYTE $0xff // callq _advance_string + LONG $0xffed96e8; BYTE $0xff // callq _advance_string TESTQ AX, AX JS LBB22_2 - DECQ BX + ADDQ $-1, BX MOVQ AX, CX MOVQ BX, AX JMP LBB22_3 @@ -5860,176 +6046,172 @@ _validate_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVQ SI, R14 - MOVQ 0(SI), R15 - MOVQ 8(DI), R12 - MOVQ R12, -64(BP) - SUBQ R15, R12 - JE LBB23_16 - MOVQ R14, -48(BP) + SUBQ $56, SP + MOVQ SI, -88(BP) + MOVQ 0(SI), DX + MOVQ 8(DI), CX + MOVQ CX, -72(BP) + SUBQ DX, CX + JE LBB23_17 MOVQ 0(DI), AX MOVQ AX, -56(BP) - LEAQ 0(AX)(R15*1), SI - CMPQ R12, $64 - MOVQ SI, -72(BP) - JB LBB23_31 - MOVL R12, R9 - ANDL $63, R9 - MOVQ $-1, R13 - XORL R14, R14 - QUAD $0xffffff72056f7ac5 // vmovdqu $-142(%rip), %xmm8 /* LCPI23_0(%rip) */ - QUAD $0xffffff7a0d6ffac5 // vmovdqu $-134(%rip), %xmm1 /* LCPI23_1(%rip) */ - QUAD $0xffffff82156ffac5 // vmovdqu $-126(%rip), %xmm2 /* LCPI23_2(%rip) */ + ADDQ DX, AX + CMPQ CX, $64 + MOVQ DX, -64(BP) + MOVQ AX, -80(BP) + JB LBB23_26 + MOVQ $-1, -48(BP) + XORL R13, R13 + QUAD $0xffffff78056f7ac5 // vmovdqu $-136(%rip), %xmm8 /* LCPI23_0(%rip) */ + QUAD $0xffffff800d6ffac5 // vmovdqu $-128(%rip), %xmm1 /* LCPI23_1(%rip) */ + QUAD $0xffffff88156ffac5 // vmovdqu $-120(%rip), %xmm2 /* LCPI23_2(%rip) */ LONG $0xdb76e1c5 // vpcmpeqd %xmm3, %xmm3, %xmm3 LBB23_3: - LONG $0x3e6ffac5 // vmovdqu (%rsi), %xmm7 - LONG $0x766ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm6 - LONG $0x6e6ffac5; BYTE $0x20 // vmovdqu $32(%rsi), %xmm5 - LONG $0x666ffac5; BYTE $0x30 // vmovdqu $48(%rsi), %xmm4 - LONG $0xc774b9c5 // vpcmpeqb %xmm7, %xmm8, %xmm0 - LONG $0xd0d7f9c5 // vpmovmskb %xmm0, %edx - LONG $0xc674b9c5 // vpcmpeqb %xmm6, %xmm8, %xmm0 - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - LONG $0xc574b9c5 // vpcmpeqb %xmm5, %xmm8, %xmm0 - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - LONG $0xc474b9c5 // vpcmpeqb %xmm4, %xmm8, %xmm0 - LONG $0xd8d779c5 // vpmovmskb %xmm0, %r11d - LONG $0xc174c1c5 // vpcmpeqb %xmm1, %xmm7, %xmm0 - LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d - LONG $0xc174c9c5 // vpcmpeqb %xmm1, %xmm6, %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - LONG $0xc174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm0 - SHLQ $16, AX - ORQ AX, DX - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - LONG $0xc174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm0 - SHLQ $32, BX - ORQ BX, DX - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - LONG $0xc764e9c5 // vpcmpgtb %xmm7, %xmm2, %xmm0 - LONG $0xfb64c1c5 // vpcmpgtb %xmm3, %xmm7, %xmm7 - LONG $0xc0dbc1c5 // vpand %xmm0, %xmm7, %xmm0 - SHLQ $16, CX - ORQ CX, R10 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - LONG $0xc664e9c5 // vpcmpgtb %xmm6, %xmm2, %xmm0 - LONG $0xf364c9c5 // vpcmpgtb 
%xmm3, %xmm6, %xmm6 - LONG $0xc0dbc9c5 // vpand %xmm0, %xmm6, %xmm0 - SHLQ $32, AX - ORQ AX, R10 - LONG $0xf8d7f9c5 // vpmovmskb %xmm0, %edi - LONG $0xc564e9c5 // vpcmpgtb %xmm5, %xmm2, %xmm0 - LONG $0xeb64d1c5 // vpcmpgtb %xmm3, %xmm5, %xmm5 - LONG $0xc0dbd1c5 // vpand %xmm0, %xmm5, %xmm0 - SHLQ $48, BX - ORQ BX, R10 - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - LONG $0xc464e9c5 // vpcmpgtb %xmm4, %xmm2, %xmm0 - LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 - LONG $0xc0dbd9c5 // vpand %xmm0, %xmm4, %xmm0 - SHLQ $16, DI - ORQ DI, CX - LONG $0xc0d779c5 // vpmovmskb %xmm0, %r8d - SHLQ $48, R11 - SHLQ $32, AX - CMPQ R13, $-1 - JNE LBB23_5 - TESTQ R10, R10 - JNE LBB23_10 - -LBB23_5: - SHLQ $48, R8 - ORQ AX, CX - ORQ R11, DX - MOVQ R10, AX + MOVQ -56(BP), AX + LONG $0x246ffac5; BYTE $0x10 // vmovdqu (%rax,%rdx), %xmm4 + LONG $0x6c6ffac5; WORD $0x1010 // vmovdqu $16(%rax,%rdx), %xmm5 + LONG $0x746ffac5; WORD $0x2010 // vmovdqu $32(%rax,%rdx), %xmm6 + LONG $0x7c6ffac5; WORD $0x3010 // vmovdqu $48(%rax,%rdx), %xmm7 + LONG $0xc474b9c5 // vpcmpeqb %xmm4, %xmm8, %xmm0 + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + LONG $0xc574b9c5 // vpcmpeqb %xmm5, %xmm8, %xmm0 + LONG $0xf0d779c5 // vpmovmskb %xmm0, %r14d + LONG $0xc674b9c5 // vpcmpeqb %xmm6, %xmm8, %xmm0 + LONG $0xf8d779c5 // vpmovmskb %xmm0, %r15d + LONG $0xc774b9c5 // vpcmpeqb %xmm7, %xmm8, %xmm0 + LONG $0xe0d779c5 // vpmovmskb %xmm0, %r12d + LONG $0xc174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm0 + LONG $0xf0d7f9c5 // vpmovmskb %xmm0, %esi + LONG $0xc174d1c5 // vpcmpeqb %xmm1, %xmm5, %xmm0 + LONG $0xc8d779c5 // vpmovmskb %xmm0, %r9d + LONG $0xc174c9c5 // vpcmpeqb %xmm1, %xmm6, %xmm0 + LONG $0xc0d779c5 // vpmovmskb %xmm0, %r8d + LONG $0xc174c1c5 // vpcmpeqb %xmm1, %xmm7, %xmm0 + LONG $0xf8d7f9c5 // vpmovmskb %xmm0, %edi + LONG $0xc564e9c5 // vpcmpgtb %xmm5, %xmm2, %xmm0 + LONG $0xeb64d1c5 // vpcmpgtb %xmm3, %xmm5, %xmm5 + LONG $0xc5dbf9c5 // vpand %xmm5, %xmm0, %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + LONG $0xc664e9c5 // vpcmpgtb %xmm6, %xmm2, %xmm0 + LONG $0xeb64c9c5 // vpcmpgtb %xmm3, %xmm6, %xmm5 + LONG $0xc5dbf9c5 // vpand %xmm5, %xmm0, %xmm0 + LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d + LONG $0xc764e9c5 // vpcmpgtb %xmm7, %xmm2, %xmm0 + LONG $0xeb64c1c5 // vpcmpgtb %xmm3, %xmm7, %xmm5 + LONG $0xc5dbf9c5 // vpand %xmm5, %xmm0, %xmm0 + LONG $0xd8d779c5 // vpmovmskb %xmm0, %r11d + SHLQ $48, R12 + SHLQ $32, R15 + ORQ R12, R15 + SHLQ $16, R14 + ORQ R15, R14 ORQ R14, AX + SHLQ $48, DI + SHLQ $32, R8 + ORQ DI, R8 + SHLQ $16, R9 + ORQ R8, R9 + SHLQ $48, R11 + SHLQ $32, R10 + ORQ R11, R10 + SHLQ $16, BX + ORQ R10, BX + ORQ R9, SI JNE LBB23_9 - ORQ R8, CX - TESTQ DX, DX + TESTQ R13, R13 JNE LBB23_11 + XORL R13, R13 -LBB23_7: - TESTQ CX, CX - JNE LBB23_18 - ADDQ $64, SI - ADDQ $-64, R12 - CMPQ R12, $63 +LBB23_6: + LONG $0xc464e9c5 // vpcmpgtb %xmm4, %xmm2, %xmm0 + LONG $0xe364d9c5 // vpcmpgtb %xmm3, %xmm4, %xmm4 + LONG $0xc4dbf9c5 // vpand %xmm4, %xmm0, %xmm0 + LONG $0xf0d7f9c5 // vpmovmskb %xmm0, %esi + ORQ SI, BX + TESTQ AX, AX + JNE LBB23_12 + TESTQ BX, BX + JNE LBB23_19 + ADDQ $-64, CX + ADDQ $64, DX + CMPQ CX, $63 JA LBB23_3 - JMP LBB23_20 + JMP LBB23_21 LBB23_9: - MOVQ R14, AX - NOTQ AX - ANDQ R10, AX - LEAQ 0(AX)(AX*1), R11 - ORQ R14, R11 - MOVQ R11, DI - NOTQ DI - ANDQ R10, DI - MOVQ $-6148914691236517206, BX - ANDQ BX, DI - XORL R14, R14 - ADDQ AX, DI - SETCS R14 - ADDQ DI, DI - MOVQ $6148914691236517205, AX - XORQ AX, DI - ANDQ R11, DI - NOTQ DI - ANDQ DI, DX - ORQ R8, CX - TESTQ DX, DX - JE LBB23_7 - JMP LBB23_11 - -LBB23_10: - MOVQ 
SI, DI - SUBQ -56(BP), DI - BSFQ R10, R13 - ADDQ DI, R13 - JMP LBB23_5 + CMPQ -48(BP), $-1 + JNE LBB23_11 + BSFQ SI, DI + ADDQ DX, DI + MOVQ DI, -48(BP) LBB23_11: - SUBQ -56(BP), SI - BSFQ DX, DX - LEAQ 1(SI)(DX*1), BX - TESTQ CX, CX - MOVQ -48(BP), R14 - JE LBB23_13 - BSFQ CX, AX - CMPQ AX, DX - JBE LBB23_27 + MOVQ R13, R9 + NOTQ R9 + ANDQ SI, R9 + LEAQ 0(R9)(R9*1), R8 + ORQ R13, R8 + MOVQ R8, R10 + NOTQ R10 + ANDQ SI, R10 + MOVQ $-6148914691236517206, SI + ANDQ SI, R10 + XORL R13, R13 + ADDQ R9, R10 + SETCS R13 + ADDQ R10, R10 + MOVQ $6148914691236517205, SI + XORQ SI, R10 + ANDQ R8, R10 + NOTQ R10 + ANDQ R10, AX + JMP LBB23_6 -LBB23_13: +LBB23_12: + BSFQ AX, CX + LEAQ 0(CX)(DX*1), R12 + ADDQ $1, R12 TESTQ BX, BX - JS LBB23_15 - MOVQ R15, SI + JE LBB23_14 + +LBB23_13: + BSFQ BX, AX + CMPQ AX, CX + JBE LBB23_27 + +LBB23_14: + MOVQ -64(BP), BX + TESTQ R12, R12 + JS LBB23_16 + LEAQ -1(BX), R14 + MOVQ BX, SI NOTQ SI - ADDQ BX, SI - MOVQ -72(BP), DI - LONG $0x0002bee8; BYTE $0x00 // callq _utf8_validate - LEAQ 0(AX)(R15*1), R13 + ADDQ R12, SI + MOVQ -80(BP), DI + LONG $0x0002cde8; BYTE $0x00 // callq _utf8_validate + ADDQ AX, BX TESTQ AX, AX - LONG $0xeb480f4c // cmovsq %rbx, %r13 - LEAQ -1(R15), BX - MOVQ $-2, AX - LONG $0xd8490f48 // cmovnsq %rax, %rbx - JMP LBB23_17 - -LBB23_15: - CMPQ BX, $-1 - JNE LBB23_17 + LONG $0xdc480f49 // cmovsq %r12, %rbx + MOVQ $-2, R12 + LONG $0xe6480f4d // cmovsq %r14, %r12 + MOVQ BX, CX + JMP LBB23_18 LBB23_16: - MOVQ $-1, BX - MOVQ -64(BP), R13 + CMPQ R12, $-1 + MOVQ -48(BP), CX + JNE LBB23_18 LBB23_17: - MOVQ R13, 0(R14) - MOVQ BX, AX - ADDQ $40, SP + MOVQ $-1, R12 + MOVQ -72(BP), CX + +LBB23_18: + MOVQ -88(BP), AX + MOVQ CX, 0(AX) + MOVQ R12, AX + ADDQ $56, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -6038,217 +6220,218 @@ LBB23_17: BYTE $0x5d // popq %rbp RET -LBB23_18: - MOVQ $-2, BX - CMPQ R13, $-1 - JE LBB23_28 - LBB23_19: - MOVQ -48(BP), R14 - JMP LBB23_17 + MOVQ $-2, R12 + MOVQ -48(BP), CX + CMPQ CX, $-1 + JNE LBB23_18 LBB23_20: - MOVQ R9, R12 - CMPQ R12, $32 - JB LBB23_35 + BSFQ BX, CX + ADDQ DX, CX + JMP LBB23_18 LBB23_21: - LONG $0x066ffac5 // vmovdqu (%rsi), %xmm0 - LONG $0x4e6ffac5; BYTE $0x10 // vmovdqu $16(%rsi), %xmm1 - QUAD $0xfffffd35156ffac5 // vmovdqu $-715(%rip), %xmm2 /* LCPI23_0(%rip) */ + ADDQ -56(BP), DX + CMPQ CX, $32 + JB LBB23_33 + +LBB23_22: + LONG $0x026ffac5 // vmovdqu (%rdx), %xmm0 + LONG $0x4a6ffac5; BYTE $0x10 // vmovdqu $16(%rdx), %xmm1 + QUAD $0xfffffd39156ffac5 // vmovdqu $-711(%rip), %xmm2 /* LCPI23_0(%rip) */ LONG $0xda74f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm3 LONG $0xdbd779c5 // vpmovmskb %xmm3, %r11d LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 - LONG $0xd2d7f9c5 // vpmovmskb %xmm2, %edx - QUAD $0xfffffd2d156ffac5 // vmovdqu $-723(%rip), %xmm2 /* LCPI23_1(%rip) */ + LONG $0xc2d7f9c5 // vpmovmskb %xmm2, %eax + QUAD $0xfffffd31156ffac5 // vmovdqu $-719(%rip), %xmm2 /* LCPI23_1(%rip) */ LONG $0xda74f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm3 - LONG $0xcbd7f9c5 // vpmovmskb %xmm3, %ecx + LONG $0xc3d779c5 // vpmovmskb %xmm3, %r8d LONG $0xd274f1c5 // vpcmpeqb %xmm2, %xmm1, %xmm2 - LONG $0xc2d7f9c5 // vpmovmskb %xmm2, %eax - QUAD $0xfffffd25156ffac5 // vmovdqu $-731(%rip), %xmm2 /* LCPI23_2(%rip) */ + LONG $0xf2d7f9c5 // vpmovmskb %xmm2, %esi + QUAD $0xfffffd29156ffac5 // vmovdqu $-727(%rip), %xmm2 /* LCPI23_2(%rip) */ LONG $0xd864e9c5 // vpcmpgtb %xmm0, %xmm2, %xmm3 LONG $0xe476d9c5 // vpcmpeqd %xmm4, %xmm4, %xmm4 LONG $0xc464f9c5 // vpcmpgtb %xmm4, %xmm0, %xmm0 - LONG $0xc3dbf9c5 // vpand 
%xmm3, %xmm0, %xmm0 - LONG $0xc0d779c5 // vpmovmskb %xmm0, %r8d + LONG $0xc0dbe1c5 // vpand %xmm0, %xmm3, %xmm0 + LONG $0xc8d779c5 // vpmovmskb %xmm0, %r9d LONG $0xc164e9c5 // vpcmpgtb %xmm1, %xmm2, %xmm0 LONG $0xcc64f1c5 // vpcmpgtb %xmm4, %xmm1, %xmm1 - LONG $0xc0dbf1c5 // vpand %xmm0, %xmm1, %xmm0 - LONG $0xd0d779c5 // vpmovmskb %xmm0, %r10d - SHLQ $16, DX + LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx SHLQ $16, AX - ORQ AX, CX - CMPQ R13, $-1 - JNE LBB23_23 - TESTQ CX, CX - JNE LBB23_47 - -LBB23_23: - SHLQ $16, R10 - ORQ R11, DX - MOVQ CX, AX - ORQ R14, AX - JNE LBB23_32 - ORQ R8, R10 - TESTQ DX, DX - JE LBB23_33 + ORQ AX, R11 + SHLQ $16, SI + SHLQ $16, BX + ORQ SI, R8 + JNE LBB23_28 + TESTQ R13, R13 + JNE LBB23_30 + XORL R13, R13 + ORQ R9, BX + TESTQ R11, R11 + JE LBB23_31 LBB23_25: - SUBQ -56(BP), SI - BSFQ DX, CX - LEAQ 1(SI)(CX*1), BX - TESTQ R10, R10 - JE LBB23_30 - BSFQ R10, AX - CMPQ AX, CX - MOVQ -48(BP), R14 - JA LBB23_13 + SUBQ -56(BP), DX + BSFQ R11, CX + LEAQ 0(DX)(CX*1), R12 + ADDQ $1, R12 + TESTQ BX, BX + JNE LBB23_13 + JMP LBB23_14 + +LBB23_26: + MOVQ $-1, -48(BP) + XORL R13, R13 + MOVQ AX, DX + CMPQ CX, $32 + JAE LBB23_22 + JMP LBB23_33 LBB23_27: - ADDQ SI, AX - CMPQ R13, $-1 - LONG $0xe8440f4c // cmoveq %rax, %r13 - MOVQ $-2, BX - JMP LBB23_17 + ADDQ DX, AX + MOVQ -48(BP), CX + CMPQ CX, $-1 + LONG $0xc8440f48 // cmoveq %rax, %rcx + MOVQ $-2, R12 + JMP LBB23_18 LBB23_28: - SUBQ -56(BP), SI - BSFQ CX, R13 - -LBB23_29: - ADDQ SI, R13 - MOVQ -48(BP), R14 - JMP LBB23_17 + CMPQ -48(BP), $-1 + JNE LBB23_30 + MOVQ DX, R10 + SUBQ -56(BP), R10 + BSFQ R8, DI + ADDQ R10, DI + MOVQ DI, -48(BP) LBB23_30: - MOVQ -48(BP), R14 - JMP LBB23_13 - -LBB23_31: - MOVQ $-1, R13 - XORL R14, R14 - CMPQ R12, $32 - JAE LBB23_21 - JMP LBB23_35 - -LBB23_32: - MOVL R14, AX + MOVL R13, SI + NOTL SI + ANDL R8, SI + LEAL 0(R13)(SI*2), R10 + LEAL 0(SI)(SI*1), AX NOTL AX - ANDL CX, AX - LEAL 0(AX)(AX*1), BX - ORL R14, BX - MOVL BX, DI - NOTL DI - ANDL CX, DI - ANDL $-1431655766, DI - XORL R14, R14 - ADDL AX, DI - SETCS R14 - ADDL DI, DI - XORL $1431655765, DI - ANDL BX, DI - NOTL DI - ANDL DI, DX - ORQ R8, R10 - TESTQ DX, DX + ANDL R8, AX + ANDL $-1431655766, AX + XORL R13, R13 + ADDL SI, AX + SETCS R13 + ADDL AX, AX + XORL $1431655765, AX + ANDL R10, AX + NOTL AX + ANDL AX, R11 + ORQ R9, BX + TESTQ R11, R11 JNE LBB23_25 +LBB23_31: + TESTQ BX, BX + JNE LBB23_47 + ADDQ $32, DX + ADDQ $-32, CX + LBB23_33: - TESTQ R10, R10 - JNE LBB23_48 - ADDQ $32, SI - ADDQ $-32, R12 + MOVQ -56(BP), SI + NOTQ SI + TESTQ R13, R13 + JNE LBB23_49 + TESTQ CX, CX + JE LBB23_44 LBB23_35: - TESTQ R14, R14 - JNE LBB23_50 - MOVQ -48(BP), R14 - TESTQ R12, R12 - JE LBB23_46 + LEAQ 1(SI), R9 + MOVQ -48(BP), AX -LBB23_37: - MOVQ -56(BP), CX - NOTQ CX +LBB23_36: + MOVQ AX, -48(BP) + XORL BX, BX -LBB23_38: - LEAQ 1(SI), AX - MOVBLZX 0(SI), DX - CMPB DX, $34 - JE LBB23_45 - LEAQ -1(R12), BX - CMPB DX, $92 - JE LBB23_42 - CMPB DX, $31 - JBE LBB23_52 - MOVQ AX, SI - MOVQ BX, R12 - TESTQ BX, BX - JNE LBB23_38 - JMP LBB23_44 +LBB23_37: + MOVBLZX 0(DX)(BX*1), AX + CMPB AX, $34 + JE LBB23_43 + CMPB AX, $92 + JE LBB23_41 + CMPB AX, $31 + JBE LBB23_51 + ADDQ $1, BX + CMPQ CX, BX + JNE LBB23_37 + JMP LBB23_45 + +LBB23_41: + LEAQ -1(CX), AX + CMPQ AX, BX + MOVQ -48(BP), AX + JE LBB23_17 + LEAQ 0(R9)(DX*1), R8 + ADDQ BX, R8 + CMPQ AX, $-1 + LONG $0xc0440f49 // cmoveq %r8, %rax + ADDQ BX, DX + ADDQ $2, DX + MOVQ CX, R8 + SUBQ BX, R8 + ADDQ $-2, R8 + ADDQ $-2, CX + CMPQ CX, BX + MOVQ R8, CX + JNE LBB23_36 + 
JMP LBB23_17 -LBB23_42: - TESTQ BX, BX - JE LBB23_16 - ADDQ CX, AX - CMPQ R13, $-1 - LONG $0xe8440f4c // cmoveq %rax, %r13 - ADDQ $2, SI - ADDQ $-2, R12 - MOVQ R12, BX - TESTQ BX, BX - JNE LBB23_38 +LBB23_43: + ADDQ BX, DX + ADDQ $1, DX LBB23_44: - CMPB DX, $34 - JNE LBB23_16 - JMP LBB23_46 + SUBQ -56(BP), DX + MOVQ DX, R12 + JMP LBB23_14 LBB23_45: - MOVQ AX, SI - -LBB23_46: - SUBQ -56(BP), SI - MOVQ SI, BX - JMP LBB23_13 + CMPB AX, $34 + JNE LBB23_17 + ADDQ CX, DX + JMP LBB23_44 LBB23_47: - MOVQ SI, AX - SUBQ -56(BP), AX - BSFQ CX, R13 - ADDQ AX, R13 - JMP LBB23_23 + MOVQ $-2, R12 + CMPQ -48(BP), $-1 + JNE LBB23_48 + SUBQ -56(BP), DX + JMP LBB23_20 -LBB23_48: - MOVQ $-2, BX - CMPQ R13, $-1 - JNE LBB23_19 - SUBQ -56(BP), SI - BSFQ R10, R13 - JMP LBB23_29 +LBB23_49: + TESTQ CX, CX + MOVQ -48(BP), DI + JE LBB23_17 + LEAQ 0(DX)(SI*1), AX + CMPQ DI, $-1 + LONG $0xf8440f48 // cmoveq %rax, %rdi + MOVQ DI, -48(BP) + ADDQ $1, DX + ADDQ $-1, CX + TESTQ CX, CX + JNE LBB23_35 + JMP LBB23_44 -LBB23_50: - TESTQ R12, R12 - MOVQ -48(BP), R14 - JE LBB23_16 - MOVQ -56(BP), AX - NOTQ AX - ADDQ SI, AX - CMPQ R13, $-1 - LONG $0xe8440f4c // cmoveq %rax, %r13 - INCQ SI - DECQ R12 - TESTQ R12, R12 - JNE LBB23_37 - JMP LBB23_46 +LBB23_51: + MOVQ $-2, R12 + CMPQ -48(BP), $-1 + JE LBB23_54 -LBB23_52: - MOVQ $-2, BX - CMPQ R13, $-1 - JNE LBB23_19 - ADDQ CX, AX - MOVQ AX, R13 - MOVQ -48(BP), R14 - JMP LBB23_17 +LBB23_48: + MOVQ -48(BP), CX + JMP LBB23_18 + +LBB23_54: + ADDQ DX, SI + LEAQ 0(BX)(SI*1), CX + ADDQ $1, CX + JMP LBB23_18 _utf8_validate: BYTE $0x55 // pushq %rbp @@ -6258,137 +6441,143 @@ _utf8_validate: BYTE $0x53 // pushq %rbx MOVQ $-1, AX TESTQ SI, SI - JLE LBB24_27 - LONG $0x050d8d4c; WORD $0x00ad; BYTE $0x00 // leaq $44293(%rip), %r9 /* _first(%rip) */ - LONG $0xfe058d4c; WORD $0x00ad; BYTE $0x00 // leaq $44542(%rip), %r8 /* _ranges(%rip) */ - LONG $0x19158d4c; WORD $0x0001; BYTE $0x00 // leaq $281(%rip), %r10 /* LJTI24_0(%rip) */ - MOVQ DI, R14 + JLE LBB24_28 + LONG $0xdf058d4c; WORD $0x00ad; BYTE $0x00 // leaq $44511(%rip), %r8 /* _first(%rip) */ + LONG $0xd80d8d4c; WORD $0x00ae; BYTE $0x00 // leaq $44760(%rip), %r9 /* _ranges(%rip) */ + LONG $0x2f158d4c; WORD $0x0001; BYTE $0x00 // leaq $303(%rip), %r10 /* LJTI24_0(%rip) */ + MOVQ DI, R11 LBB24_2: - CMPB 0(R14), $0 + CMPB 0(R11), $0 JS LBB24_3 MOVQ SI, DX - MOVQ R14, CX + MOVQ R11, CX CMPQ SI, $16 - JL LBB24_5 + JL LBB24_15 + XORL BX, BX + XORL DX, DX -LBB24_10: - LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - TESTW BX, BX - JNE LBB24_11 +LBB24_6: + LONG $0x6f7ac1c4; WORD $0x1b04 // vmovdqu (%r11,%rbx), %xmm0 + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + TESTL CX, CX + JNE LBB24_7 + ADDQ $16, BX + LEAQ 0(SI)(DX*1), CX + ADDQ $-16, CX + ADDQ $-16, DX ADDQ $16, CX - CMPQ DX, $31 - LEAQ -16(DX), DX - JG LBB24_10 + CMPQ CX, $31 + JG LBB24_6 + MOVQ R11, CX + SUBQ DX, CX + MOVQ SI, DX + SUBQ BX, DX -LBB24_5: +LBB24_15: TESTQ DX, DX - JLE LBB24_27 - INCQ DX + JLE LBB24_28 + ADDQ $1, DX + MOVQ CX, BX + SUBQ R11, BX -LBB24_7: +LBB24_17: CMPB 0(CX), $0 - JS LBB24_12 - INCQ CX - DECQ DX + JS LBB24_8 + ADDQ $1, CX + ADDQ $-1, DX + ADDQ $1, BX CMPQ DX, $1 - JG LBB24_7 - JMP LBB24_27 + JG LBB24_17 + JMP LBB24_28 LBB24_3: - XORL DX, DX - CMPQ DX, $-1 - JNE LBB24_14 - JMP LBB24_27 - -LBB24_12: - SUBQ R14, CX - MOVQ CX, DX - CMPQ DX, $-1 - JE LBB24_27 + XORL BX, BX -LBB24_14: - SUBQ DX, SI - JLE LBB24_27 - LEAQ 0(R14)(DX*1), R11 - MOVBLZX 0(R14)(DX*1), R14 - MOVBLZX 0(R14)(R9*1), R15 - MOVL R15, DX +LBB24_8: + CMPQ BX, $-1 + 
JE LBB24_28 + +LBB24_9: + SUBQ BX, SI + JLE LBB24_28 + LEAQ 0(R11)(BX*1), R14 + MOVBLZX 0(R11)(BX*1), R11 + MOVBLZX 0(R11)(R8*1), BX + MOVL BX, DX ANDL $7, DX CMPQ SI, DX - JB LBB24_25 + JB LBB24_26 CMPB DX, $4 - JA LBB24_25 - MOVL $1, BX + JA LBB24_26 + MOVL $1, R15 MOVBLZX DX, CX MOVLQSX 0(R10)(CX*4), CX ADDQ R10, CX JMP CX -LBB24_18: - MOVB 3(R11), BX - TESTB BX, BX - JNS LBB24_25 - CMPB BX, $-65 - JA LBB24_25 +LBB24_19: + MOVB 3(R14), CX + TESTB CX, CX + JNS LBB24_26 + CMPB CX, $-65 + JA LBB24_26 -LBB24_20: - MOVB 2(R11), BX - TESTB BX, BX - JNS LBB24_25 - CMPB BX, $-65 - JA LBB24_25 - -LBB24_22: - TESTB R14, R14 - JNS LBB24_25 - SHRQ $4, R15 - MOVB 1(R11), R14 - CMPB R14, 0(R8)(R15*2) - JB LBB24_25 - MOVQ DX, BX - CMPB 1(R8)(R15*2), R14 - JB LBB24_25 +LBB24_21: + MOVB 2(R14), CX + TESTB CX, CX + JNS LBB24_26 + CMPB CX, $-65 + JA LBB24_26 + +LBB24_23: + SHRQ $4, BX + MOVB 1(R14), CX + CMPB CX, 0(R9)(BX*2) + JB LBB24_26 + CMPB 1(R9)(BX*2), CX + JB LBB24_26 + MOVQ DX, R15 + TESTB R11, R11 + JNS LBB24_26 -LBB24_26: - ADDQ BX, R11 - MOVQ R11, R14 - SUBQ BX, SI +LBB24_27: + ADDQ R15, R14 + MOVQ R14, R11 + SUBQ R15, SI JG LBB24_2 - JMP LBB24_27 - -LBB24_11: - MOVWLZX BX, DX - SUBQ R14, CX - BSFQ DX, DX - ADDQ CX, DX - CMPQ DX, $-1 - JNE LBB24_14 - JMP LBB24_27 - -LBB24_25: - SUBQ DI, R11 - MOVQ R11, AX + JMP LBB24_28 -LBB24_27: +LBB24_7: + BSFW CX, CX + MOVWLZX CX, BX + SUBQ DX, BX + CMPQ BX, $-1 + JNE LBB24_9 + JMP LBB24_28 + +LBB24_26: + SUBQ DI, R14 + MOVQ R14, AX + +LBB24_28: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET +// .set L24_0_set_27, LBB24_27-LJTI24_0 // .set L24_0_set_26, LBB24_26-LJTI24_0 -// .set L24_0_set_25, LBB24_25-LJTI24_0 -// .set L24_0_set_22, LBB24_22-LJTI24_0 -// .set L24_0_set_20, LBB24_20-LJTI24_0 -// .set L24_0_set_18, LBB24_18-LJTI24_0 +// .set L24_0_set_23, LBB24_23-LJTI24_0 +// .set L24_0_set_21, LBB24_21-LJTI24_0 +// .set L24_0_set_19, LBB24_19-LJTI24_0 LJTI24_0: - LONG $0xffffffc9 // .long L24_0_set_26 - LONG $0xfffffff3 // .long L24_0_set_25 - LONG $0xffffffac // .long L24_0_set_22 - LONG $0xffffff9f // .long L24_0_set_20 - LONG $0xffffff92 // .long L24_0_set_18 + LONG $0xffffffcc // .long L24_0_set_27 + LONG $0xfffffff3 // .long L24_0_set_26 + LONG $0xffffffaf // .long L24_0_set_23 + LONG $0xffffffa2 // .long L24_0_set_21 + LONG $0xffffff95 // .long L24_0_set_19 _skip_negative: BYTE $0x55 // pushq %rbp @@ -6402,12 +6591,12 @@ _skip_negative: MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0x000098e8; BYTE $0x00 // callq _do_skip_number + LONG $0x000099e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX JS LBB25_1 ADDQ BX, AX MOVQ AX, 0(R14) - DECQ BX + ADDQ $-1, BX JMP LBB25_3 LBB25_1: @@ -6436,355 +6625,310 @@ LCPI26_3: QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' LCPI26_4: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' LCPI26_5: QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' 
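The LCPI26_4 change above (a row of spaces 0x20 becoming a row of 0xDF masks) and the LCPI26_6 change just below ('e' becoming 'E') are two halves of the same ASCII case-folding trick in _do_skip_number: 'e' (0x65) and 'E' (0x45) differ only in bit 5, so the exponent marker of a JSON number can be matched case-insensitively with a single compare once that bit is forced one way. The old code OR-ed 0x20 in and compared against 'e' (vpor); the new code ANDs with 0xDF and compares against 'E' (vpand). A minimal Go sketch of the equivalence, with names of our own choosing (this is not the generated code, just the byte-level idea):

    package main

    import "fmt"

    // isExpOld mirrors the old constants: set bit 5 (OR 0x20, the old
    // LCPI26_4 of spaces), then compare against lowercase 'e'.
    func isExpOld(c byte) bool { return c|0x20 == 'e' }

    // isExpNew mirrors the new constants: clear bit 5 (AND 0xDF, the new
    // LCPI26_4 mask), then compare against uppercase 'E'.
    func isExpNew(c byte) bool { return c&0xDF == 'E' }

    func main() {
        // Both classifiers accept exactly the byte set {'E', 'e'}.
        for c := 0; c < 256; c++ {
            if isExpOld(byte(c)) != isExpNew(byte(c)) {
                panic("mismatch") // never reached
            }
        }
        fmt.Println("OR 0x20 + 'e' and AND 0xDF + 'E' match the same bytes")
    }
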
LCPI26_6: - QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' + QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' _do_skip_number: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5741 // pushq %r15 WORD $0x5641 // pushq %r14 - WORD $0x5541 // pushq %r13 - WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx TESTQ SI, SI - JE LBB26_34 + JE LBB26_1 CMPB 0(DI), $48 - JNE LBB26_5 - MOVL $1, DX + JNE LBB26_6 + MOVL $1, AX CMPQ SI, $1 - JE LBB26_52 - MOVB 1(DI), AX - ADDB $-46, AX - CMPB AX, $55 - JA LBB26_52 - MOVBLZX AX, AX - MOVQ $36028797027352577, CX - BTQ AX, CX - JAE LBB26_52 + JE LBB26_55 + MOVB 1(DI), CX + ADDB $-46, CX + CMPB CX, $55 + JA LBB26_55 + MOVBLZX CX, CX + MOVQ $36028797027352577, DX + BTQ CX, DX + JAE LBB26_55 -LBB26_5: +LBB26_6: CMPQ SI, $16 - JB LBB26_57 - LEAQ -16(SI), R11 - MOVQ R11, AX - ANDQ $-16, AX - LEAQ 16(AX)(DI*1), R10 - ANDL $15, R11 + JB LBB26_7 + MOVQ $-1, R10 + XORL AX, AX + QUAD $0xffffff29056f7ac5 // vmovdqu $-215(%rip), %xmm8 /* LCPI26_0(%rip) */ + QUAD $0xffffff310d6f7ac5 // vmovdqu $-207(%rip), %xmm9 /* LCPI26_1(%rip) */ + QUAD $0xffffff39156f7ac5 // vmovdqu $-199(%rip), %xmm10 /* LCPI26_2(%rip) */ + QUAD $0xffffff411d6f7ac5 // vmovdqu $-191(%rip), %xmm11 /* LCPI26_3(%rip) */ + QUAD $0xffffff49256ffac5 // vmovdqu $-183(%rip), %xmm4 /* LCPI26_4(%rip) */ + QUAD $0xffffff512d6ffac5 // vmovdqu $-175(%rip), %xmm5 /* LCPI26_5(%rip) */ + QUAD $0xffffff59356ffac5 // vmovdqu $-167(%rip), %xmm6 /* LCPI26_6(%rip) */ MOVQ $-1, R9 - QUAD $0xffffff15056f7ac5 // vmovdqu $-235(%rip), %xmm8 /* LCPI26_0(%rip) */ - QUAD $0xffffff1d0d6f7ac5 // vmovdqu $-227(%rip), %xmm9 /* LCPI26_1(%rip) */ - QUAD $0xffffff25156f7ac5 // vmovdqu $-219(%rip), %xmm10 /* LCPI26_2(%rip) */ - QUAD $0xffffff2d1d6f7ac5 // vmovdqu $-211(%rip), %xmm11 /* LCPI26_3(%rip) */ - QUAD $0xffffff35256ffac5 // vmovdqu $-203(%rip), %xmm4 /* LCPI26_4(%rip) */ - QUAD $0xffffff3d2d6ffac5 // vmovdqu $-195(%rip), %xmm5 /* LCPI26_5(%rip) */ - QUAD $0xffffff45356ffac5 // vmovdqu $-187(%rip), %xmm6 /* LCPI26_6(%rip) */ - MOVL $4294967295, R14 - MOVQ $-1, AX MOVQ $-1, R8 - MOVQ DI, R15 + MOVQ SI, R14 -LBB26_7: - LONG $0x6f7ac1c4; BYTE $0x3f // vmovdqu (%r15), %xmm7 +LBB26_9: + LONG $0x3c6ffac5; BYTE $0x07 // vmovdqu (%rdi,%rax), %xmm7 LONG $0x6441c1c4; BYTE $0xc0 // vpcmpgtb %xmm8, %xmm7, %xmm0 LONG $0xcf64b1c5 // vpcmpgtb %xmm7, %xmm9, %xmm1 LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 LONG $0xcf74a9c5 // vpcmpeqb %xmm7, %xmm10, %xmm1 LONG $0xd774a1c5 // vpcmpeqb %xmm7, %xmm11, %xmm2 LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd4ebc1c5 // vpor %xmm4, %xmm7, %xmm2 + LONG $0xd4dbc1c5 // vpand %xmm4, %xmm7, %xmm2 LONG $0xd674e9c5 // vpcmpeqb %xmm6, %xmm2, %xmm2 LONG $0xfd74c1c5 // vpcmpeqb %xmm5, %xmm7, %xmm7 LONG $0xdfebe9c5 // vpor %xmm7, %xmm2, %xmm3 LONG $0xc0ebf1c5 // vpor %xmm0, %xmm1, %xmm0 LONG $0xc0ebe1c5 // vpor %xmm0, %xmm3, %xmm0 LONG $0xd7d7f9c5 // vpmovmskb %xmm7, %edx - LONG $0xead779c5 // vpmovmskb %xmm2, %r13d - LONG $0xe1d779c5 // vpmovmskb %xmm1, %r12d + LONG $0xfad779c5 // vpmovmskb %xmm2, %r15d + LONG $0xd9d779c5 // vpmovmskb %xmm1, %r11d LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - XORQ R14, CX - BSFQ CX, CX + NOTL CX + BSFL CX, CX CMPL CX, $16 - JE LBB26_9 + JE LBB26_11 MOVL $-1, BX SHLL CX, BX NOTL BX ANDL BX, DX - ANDL BX, R13 - ANDL R12, BX - MOVL BX, R12 + ANDL BX, R15 + ANDL R11, BX + MOVL BX, R11 -LBB26_9: +LBB26_11: LEAL -1(DX), BX ANDL DX, BX - JNE LBB26_50 - LEAL -1(R13), BX - ANDL 
R13, BX - JNE LBB26_50 - LEAL -1(R12), BX - ANDL R12, BX - JNE LBB26_50 + JNE LBB26_12 + LEAL -1(R15), BX + ANDL R15, BX + JNE LBB26_12 + LEAL -1(R11), BX + ANDL R11, BX + JNE LBB26_12 TESTL DX, DX - JE LBB26_15 - MOVQ R15, BX - SUBQ DI, BX + JE LBB26_19 BSFL DX, DX - ADDQ BX, DX CMPQ R8, $-1 - JNE LBB26_51 + JNE LBB26_56 + ADDQ AX, DX MOVQ DX, R8 -LBB26_15: - TESTL R13, R13 - JE LBB26_18 - MOVQ R15, BX - SUBQ DI, BX - BSFL R13, DX - ADDQ BX, DX - CMPQ AX, $-1 - JNE LBB26_51 - MOVQ DX, AX - -LBB26_18: - TESTL R12, R12 - JE LBB26_21 - MOVQ R15, BX - SUBQ DI, BX - BSFL R12, DX - ADDQ BX, DX +LBB26_19: + TESTL R15, R15 + JE LBB26_22 + BSFL R15, DX CMPQ R9, $-1 - JNE LBB26_51 + JNE LBB26_56 + ADDQ AX, DX MOVQ DX, R9 -LBB26_21: - CMPL CX, $16 - JNE LBB26_35 - ADDQ $16, R15 - ADDQ $-16, SI - CMPQ SI, $15 - JA LBB26_7 - TESTQ R11, R11 - JE LBB26_36 - -LBB26_24: - LEAQ 0(R10)(R11*1), CX - LONG $0x5b358d48; WORD $0x0001; BYTE $0x00 // leaq $347(%rip), %rsi /* LJTI26_0(%rip) */ - JMP LBB26_26 +LBB26_22: + TESTL R11, R11 + JE LBB26_25 + BSFL R11, DX + CMPQ R10, $-1 + JNE LBB26_56 + ADDQ AX, DX + MOVQ DX, R10 LBB26_25: - MOVQ BX, R10 - DECQ R11 - JE LBB26_54 - -LBB26_26: - MOVBLSX 0(R10), DX - ADDL $-43, DX - CMPL DX, $58 - JA LBB26_36 - LEAQ 1(R10), BX - MOVLQSX 0(SI)(DX*4), DX - ADDQ SI, DX - JMP DX + CMPL CX, $16 + JNE LBB26_57 + ADDQ $-16, R14 + ADDQ $16, AX + CMPQ R14, $15 + JA LBB26_9 + LEAQ 0(DI)(AX*1), CX + MOVQ CX, R11 + CMPQ AX, SI + JE LBB26_41 LBB26_28: - MOVQ BX, DX - SUBQ DI, DX + LEAQ 0(CX)(R14*1), R11 + MOVQ CX, SI + SUBQ DI, SI + XORL AX, AX + LONG $0x3b3d8d4c; WORD $0x0001; BYTE $0x00 // leaq $315(%rip), %r15 /* LJTI26_0(%rip) */ + JMP LBB26_29 + +LBB26_31: + CMPL DX, $101 + JNE LBB26_40 + +LBB26_32: CMPQ R9, $-1 JNE LBB26_58 - DECQ DX - MOVQ DX, R9 - JMP LBB26_25 + LEAQ 0(SI)(AX*1), R9 + +LBB26_39: + ADDQ $1, AX + CMPQ R14, AX + JE LBB26_41 + +LBB26_29: + MOVBLSX 0(CX)(AX*1), DX + LEAL -48(DX), BX + CMPL BX, $10 + JB LBB26_39 + LEAL -43(DX), BX + CMPL BX, $26 + JA LBB26_31 + MOVLQSX 0(R15)(BX*4), DX + ADDQ R15, DX + JMP DX -LBB26_30: - MOVQ BX, DX - SUBQ DI, DX - CMPQ AX, $-1 +LBB26_37: + CMPQ R10, $-1 JNE LBB26_58 - DECQ DX - MOVQ DX, AX - JMP LBB26_25 + LEAQ 0(SI)(AX*1), R10 + JMP LBB26_39 -LBB26_32: - MOVQ BX, DX - SUBQ DI, DX +LBB26_35: CMPQ R8, $-1 JNE LBB26_58 - DECQ DX - MOVQ DX, R8 - JMP LBB26_25 + LEAQ 0(SI)(AX*1), R8 + JMP LBB26_39 -LBB26_34: +LBB26_1: MOVQ $-1, AX - JMP LBB26_53 - -LBB26_35: - ADDQ CX, R15 - MOVQ R15, R10 + JMP LBB26_55 -LBB26_36: - MOVQ $-1, DX - TESTQ AX, AX - JE LBB26_52 +LBB26_57: + MOVL CX, R11 + ADDQ DI, R11 + ADDQ AX, R11 -LBB26_37: - TESTQ R9, R9 - JE LBB26_52 +LBB26_41: + MOVQ $-1, AX TESTQ R8, R8 - JE LBB26_52 - SUBQ DI, R10 - LEAQ -1(R10), CX - CMPQ AX, CX - JE LBB26_45 - CMPQ R8, CX - JE LBB26_45 - CMPQ R9, CX - JE LBB26_45 - TESTQ R9, R9 - JLE LBB26_46 - LEAQ -1(R9), CX - CMPQ AX, CX - JE LBB26_46 - NOTQ R9 - MOVQ R9, DX - MOVQ R9, AX - JMP LBB26_53 + JNE LBB26_42 + JMP LBB26_55 -LBB26_45: - NEGQ R10 - MOVQ R10, DX - MOVQ R10, AX - JMP LBB26_53 +LBB26_40: + ADDQ AX, CX + MOVQ CX, R11 + MOVQ $-1, AX + TESTQ R8, R8 + JE LBB26_55 -LBB26_46: - MOVQ R8, CX - ORQ AX, CX +LBB26_42: + TESTQ R10, R10 + JE LBB26_55 + TESTQ R9, R9 + JE LBB26_55 + SUBQ DI, R11 + LEAQ -1(R11), AX CMPQ R8, AX - JL LBB26_49 - TESTQ CX, CX - JS LBB26_49 - NOTQ R8 - MOVQ R8, DX - MOVQ R8, AX - JMP LBB26_53 - -LBB26_49: - TESTQ CX, CX - LEAQ -1(AX), CX - NOTQ AX - LONG $0xc2480f49 // cmovsq %r10, %rax - CMPQ R8, CX - LONG $0xc2450f49 // cmovneq %r10, %rax - JMP LBB26_53 - 
-LBB26_50: - SUBQ DI, R15 - BSFL BX, DX - ADDQ R15, DX - -LBB26_51: - NOTQ DX + JE LBB26_47 + CMPQ R10, AX + JE LBB26_47 + CMPQ R9, AX + JE LBB26_47 + TESTQ R10, R10 + JLE LBB26_51 + LEAQ -1(R10), AX + CMPQ R9, AX + JE LBB26_51 + NOTQ R10 + MOVQ R10, AX + JMP LBB26_55 -LBB26_52: - MOVQ DX, AX +LBB26_47: + NEGQ R11 + MOVQ R11, AX -LBB26_53: +LBB26_55: BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET +LBB26_51: + MOVQ R8, AX + ORQ R9, AX + SETPL AX + JS LBB26_54 + CMPQ R8, R9 + JL LBB26_54 + NOTQ R8 + MOVQ R8, AX + JMP LBB26_55 + LBB26_54: - MOVQ CX, R10 - MOVQ $-1, DX - TESTQ AX, AX - JNE LBB26_37 - JMP LBB26_52 + LEAQ -1(R9), CX + CMPQ R8, CX + NOTQ R9 + LONG $0xcb450f4d // cmovneq %r11, %r9 + TESTB AX, AX + LONG $0xcb440f4d // cmoveq %r11, %r9 + MOVQ R9, AX + JMP LBB26_55 + +LBB26_12: + BSFL BX, CX + JMP LBB26_13 LBB26_58: - NEGQ DX - JMP LBB26_52 + SUBQ CX, DI + NOTQ AX + ADDQ DI, AX + JMP LBB26_55 -LBB26_57: +LBB26_56: + MOVL DX, CX + +LBB26_13: + NOTQ AX + SUBQ CX, AX + JMP LBB26_55 + +LBB26_7: MOVQ $-1, R8 - MOVQ DI, R10 - MOVQ SI, R11 - MOVQ $-1, AX + MOVQ DI, CX + MOVQ SI, R14 MOVQ $-1, R9 - JMP LBB26_24 + MOVQ $-1, R10 + JMP LBB26_28 -// .set L26_0_set_28, LBB26_28-LJTI26_0 -// .set L26_0_set_36, LBB26_36-LJTI26_0 +// .set L26_0_set_37, LBB26_37-LJTI26_0 +// .set L26_0_set_40, LBB26_40-LJTI26_0 +// .set L26_0_set_35, LBB26_35-LJTI26_0 // .set L26_0_set_32, LBB26_32-LJTI26_0 -// .set L26_0_set_25, LBB26_25-LJTI26_0 -// .set L26_0_set_30, LBB26_30-LJTI26_0 LJTI26_0: - LONG $0xfffffecc // .long L26_0_set_28 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xfffffecc // .long L26_0_set_28 - LONG $0xfffffefc // .long L26_0_set_32 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xfffffea7 // .long L26_0_set_25 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xfffffee4 // .long L26_0_set_30 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 
// .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xffffff26 // .long L26_0_set_36 - LONG $0xfffffee4 // .long L26_0_set_30 + LONG $0xffffff00 // .long L26_0_set_37 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff00 // .long L26_0_set_37 + LONG $0xffffff10 // .long L26_0_set_35 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xffffff40 // .long L26_0_set_40 + LONG $0xfffffecc // .long L26_0_set_32 _skip_positive: BYTE $0x55 // pushq %rbp @@ -6801,16 +6945,16 @@ _skip_positive: MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0xfffbb8e8; BYTE $0xff // callq _do_skip_number + LONG $0xfffc78e8; BYTE $0xff // callq _do_skip_number + LEAQ -1(AX), DX MOVQ $-2, CX - MOVQ $-2, DX - SUBQ AX, DX + MOVQ $-2, SI + SUBQ AX, SI TESTQ AX, AX - LEAQ -1(AX), AX - LONG $0xc2480f48 // cmovsq %rdx, %rax + LONG $0xf2490f48 // cmovnsq %rdx, %rsi LONG $0xcb490f48 // cmovnsq %rbx, %rcx - ADDQ R15, AX - MOVQ AX, 0(R14) + ADDQ R15, SI + MOVQ SI, 0(R14) MOVQ CX, AX ADDQ $8, SP BYTE $0x5b // popq %rbx @@ -6846,7 +6990,7 @@ _skip_number: LBB28_3: MOVQ BX, DI - LONG $0xfffb41e8; BYTE $0xff // callq _do_skip_number + LONG $0xfffc01e8; BYTE $0xff // callq _do_skip_number TESTQ AX, AX JS LBB28_7 ADDQ AX, BX @@ -6889,49 +7033,64 @@ _validate_one: _find_non_ascii: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - MOVQ DI, CX CMPQ SI, $16 JL LBB30_1 + XORL AX, AX + XORL DX, DX -LBB30_6: - LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 - LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - TESTW AX, AX - JNE LBB30_7 +LBB30_9: + LONG $0x046ffac5; BYTE $0x07 // vmovdqu (%rdi,%rax), %xmm0 + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + TESTL CX, CX + JNE LBB30_10 + ADDQ $16, AX + LEAQ 0(SI)(DX*1), CX + ADDQ $-16, CX + ADDQ $-16, DX ADDQ $16, CX - CMPQ SI, $31 - LEAQ -16(SI), SI - JG LBB30_6 + CMPQ CX, $31 + JG LBB30_9 + MOVQ DI, CX + SUBQ DX, CX + SUBQ AX, SI + MOVQ $-1, AX + TESTQ SI, SI + JG LBB30_5 + JMP LBB30_12 LBB30_1: + MOVQ DI, CX MOVQ $-1, AX TESTQ SI, SI - JLE LBB30_9 - INCQ SI + JLE LBB30_12 + +LBB30_5: + ADDQ $1, SI + MOVQ CX, DX + SUBQ DI, DX -LBB30_3: +LBB30_6: CMPB 0(CX), $0 - JS LBB30_8 - INCQ CX - DECQ SI + JS LBB30_7 + ADDQ $1, CX + ADDQ $-1, SI + ADDQ $1, DX CMPQ SI, $1 - JG LBB30_3 + JG LBB30_6 -LBB30_9: +LBB30_12: BYTE $0x5d // popq %rbp RET -LBB30_8: - SUBQ DI, CX - MOVQ CX, AX +LBB30_7: + MOVQ DX, AX BYTE $0x5d // popq %rbp RET -LBB30_7: 
+LBB30_10: + BSFW CX, AX MOVWLZX AX, AX - SUBQ DI, CX - BSFQ AX, AX - ADDQ CX, AX + SUBQ DX, AX BYTE $0x5d // popq %rbp RET @@ -6940,7 +7099,7 @@ _print_mantissa: WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx - MOVLQSX DX, R14 + MOVL DX, R14 ADDQ SI, R14 MOVQ DI, AX SHRQ $32, AX @@ -6976,7 +7135,7 @@ _print_mantissa: LONG $0x64fa6b41 // imull $100, %r10d, %edi SUBL DI, AX MOVWLZX AX, R11 - LONG $0x203d8d48; WORD $0x0059; BYTE $0x00 // leaq $22816(%rip), %rdi /* _Digits(%rip) */ + LONG $0x753d8d48; WORD $0x0059; BYTE $0x00 // leaq $22901(%rip), %rdi /* _Digits(%rip) */ MOVWLZX 0(DI)(R8*2), AX MOVW AX, -2(R14) MOVWLZX 0(DI)(R9*2), AX @@ -6992,7 +7151,7 @@ LBB31_2: CMPL DI, $10000 JB LBB31_3 MOVL $3518437209, R8 - LONG $0xd80d8d4c; WORD $0x0058; BYTE $0x00 // leaq $22744(%rip), %r9 /* _Digits(%rip) */ + LONG $0x2d0d8d4c; WORD $0x0059; BYTE $0x00 // leaq $22829(%rip), %r9 /* _Digits(%rip) */ LBB31_5: MOVL DI, AX @@ -7023,7 +7182,7 @@ LBB31_7: WORD $0xd16b; BYTE $0x64 // imull $100, %ecx, %edx SUBL DX, AX MOVWLZX AX, AX - LONG $0x71158d48; WORD $0x0058; BYTE $0x00 // leaq $22641(%rip), %rdx /* _Digits(%rip) */ + LONG $0xc6158d48; WORD $0x0058; BYTE $0x00 // leaq $22726(%rip), %rdx /* _Digits(%rip) */ MOVWLZX 0(DX)(AX*2), AX MOVW AX, -2(R14) ADDQ $-2, R14 @@ -7033,7 +7192,7 @@ LBB31_8: CMPL AX, $10 JB LBB31_10 MOVL AX, AX - LONG $0x540d8d48; WORD $0x0058; BYTE $0x00 // leaq $22612(%rip), %rcx /* _Digits(%rip) */ + LONG $0xa90d8d48; WORD $0x0058; BYTE $0x00 // leaq $22697(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVW AX, -2(R14) BYTE $0x5b // popq %rbx @@ -7058,292 +7217,314 @@ LBB31_10: _left_shift: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5741 // pushq %r15 WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx MOVL SI, CX - IMUL3Q $104, CX, DX - LONG $0x78358d48; WORD $0x008a; BYTE $0x00 // leaq $35448(%rip), %rsi /* _LSHIFT_TAB(%rip) */ - MOVL 0(DX)(SI*1), R8 - MOVQ 0(DI), R10 + IMUL3Q $104, CX, R14 + LONG $0xcb158d48; WORD $0x008b; BYTE $0x00 // leaq $35787(%rip), %rdx /* _LSHIFT_TAB(%rip) */ + MOVL 0(R14)(DX*1), R8 + MOVQ 0(DI), R11 MOVLQSX 16(DI), R9 - MOVB 4(DX)(SI*1), AX + MOVL R9, R10 TESTQ R9, R9 - JE LBB32_6 - LEAQ 5(DX)(SI*1), DX - XORL SI, SI + JE LBB32_1 + LEAQ 0(R14)(DX*1), SI + ADDQ $4, SI + XORL BX, BX LBB32_3: + MOVBLZX 0(SI)(BX*1), AX TESTB AX, AX - JE LBB32_8 - CMPB 0(R10)(SI*1), AX + JE LBB32_10 + CMPB 0(R11)(BX*1), AX JNE LBB32_5 - MOVBLZX 0(DX)(SI*1), AX - INCQ SI - CMPQ R9, SI + ADDQ $1, BX + CMPQ R9, BX JNE LBB32_3 + MOVL R9, SI + ADDQ R14, DX + CMPB 4(SI)(DX*1), $0 + JNE LBB32_9 + JMP LBB32_10 -LBB32_6: - TESTB AX, AX - JE LBB32_8 +LBB32_1: + XORL SI, SI + ADDQ R14, DX + CMPB 4(SI)(DX*1), $0 + JE LBB32_10 -LBB32_7: - DECL R8 +LBB32_9: + ADDL $-1, R8 -LBB32_8: - TESTL R9, R9 - JLE LBB32_23 - LEAL 0(R8)(R9*1), AX - MOVLQSX AX, R14 - DECQ R14 +LBB32_10: + TESTL R10, R10 + JLE LBB32_25 + LEAL 0(R8)(R10*1), AX + MOVLQSX AX, R15 + ADDL $-1, R9 + ADDQ $-1, R15 XORL DX, DX - MOVQ $-3689348814741910323, R11 + MOVQ $-3689348814741910323, R14 -LBB32_10: - MOVBQSX -1(R10)(R9*1), SI +LBB32_12: + MOVL R9, AX + MOVBQSX 0(R11)(AX*1), SI ADDQ $-48, SI SHLQ CX, SI ADDQ DX, SI MOVQ SI, AX - MULQ R11 + MULQ R14 SHRQ $3, DX LEAQ 0(DX)(DX*1), AX LEAQ 0(AX)(AX*4), BX MOVQ SI, AX SUBQ BX, AX - CMPQ 8(DI), R14 - JBE LBB32_16 + CMPQ 8(DI), R15 + JBE LBB32_18 ADDB $48, AX - MOVB AX, 0(R10)(R14*1) - JMP LBB32_18 + MOVB AX, 0(R11)(R15*1) + JMP LBB32_20 -LBB32_16: +LBB32_18: TESTQ AX, AX - JE LBB32_18 
+ JE LBB32_20 MOVL $1, 28(DI) -LBB32_18: - CMPQ R9, $2 - JL LBB32_12 - DECQ R9 - MOVQ 0(DI), R10 - DECQ R14 - JMP LBB32_10 +LBB32_20: + CMPQ R10, $2 + JL LBB32_14 + ADDQ $-1, R10 + MOVQ 0(DI), R11 + ADDL $-1, R9 + ADDQ $-1, R15 + JMP LBB32_12 -LBB32_12: +LBB32_14: CMPQ SI, $10 - JAE LBB32_13 + JAE LBB32_15 -LBB32_23: +LBB32_25: MOVLQSX 16(DI), CX MOVLQSX R8, AX ADDQ CX, AX MOVL AX, 16(DI) MOVQ 8(DI), CX CMPQ CX, AX - JA LBB32_25 + JA LBB32_27 MOVL CX, 16(DI) MOVL CX, AX -LBB32_25: +LBB32_27: ADDL R8, 20(DI) TESTL AX, AX - JLE LBB32_29 + JLE LBB32_31 MOVQ 0(DI), CX - MOVL AX, AX - -LBB32_27: - CMPB -1(CX)(AX*1), $48 - JNE LBB32_31 - MOVL AX, DX - DECQ AX - DECL DX - MOVL DX, 16(DI) - LEAQ 1(AX), DX - CMPQ DX, $1 - JG LBB32_27 + MOVL AX, DX + ADDQ $1, DX + ADDL $-1, AX LBB32_29: - TESTL AX, AX - JE LBB32_30 + MOVL AX, SI + CMPB 0(CX)(SI*1), $48 + JNE LBB32_33 + MOVL AX, 16(DI) + ADDQ $-1, DX + ADDL $-1, AX + CMPQ DX, $1 + JG LBB32_29 + JMP LBB32_32 LBB32_31: + JNE LBB32_33 + +LBB32_32: + MOVL $0, 20(DI) + +LBB32_33: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET -LBB32_13: - MOVLQSX R14, SI - DECQ SI - JMP LBB32_14 - LBB32_15: + ADDL R8, R9 + MOVLQSX R9, SI + ADDQ $-1, SI + JMP LBB32_16 + +LBB32_17: ADDB $48, AX MOVQ 0(DI), BX MOVB AX, 0(BX)(SI*1) -LBB32_22: - DECQ SI +LBB32_24: + ADDQ $-1, SI CMPQ CX, $9 - JBE LBB32_23 + JBE LBB32_25 -LBB32_14: +LBB32_16: MOVQ DX, CX MOVQ DX, AX - MULQ R11 + MULQ R14 SHRQ $3, DX LEAQ 0(DX)(DX*1), AX LEAQ 0(AX)(AX*4), BX MOVQ CX, AX SUBQ BX, AX CMPQ 8(DI), SI - JA LBB32_15 + JA LBB32_17 TESTQ AX, AX - JE LBB32_22 + JE LBB32_24 MOVL $1, 28(DI) - JMP LBB32_22 - -LBB32_30: - MOVL $0, 20(DI) - BYTE $0x5b // popq %rbx - WORD $0x5e41 // popq %r14 - BYTE $0x5d // popq %rbp - RET + JMP LBB32_24 LBB32_5: - JL LBB32_7 - JMP LBB32_8 + JL LBB32_9 + JMP LBB32_10 _right_shift: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - MOVL SI, CX - MOVLQSX 16(DI), R9 - XORL SI, SI - XORL AX, AX + BYTE $0x55 // pushq %rbp + WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + BYTE $0x53 // pushq %rbx + MOVL SI, CX + MOVL 16(DI), R8 + XORL DX, DX + TESTL R8, R8 + MOVL $0, R11 + LONG $0xd84f0f45 // cmovgl %r8d, %r11d + XORL AX, AX LBB33_1: - CMPQ SI, R9 - JGE LBB33_2 + CMPQ R11, DX + JE LBB33_2 LEAQ 0(AX)(AX*4), AX - MOVQ 0(DI), DX - MOVBQSX 0(DX)(SI*1), DX - LEAQ -48(DX)(AX*2), AX - INCQ SI - MOVQ AX, DX - SHRQ CX, DX - TESTQ DX, DX + MOVQ 0(DI), SI + MOVBQSX 0(SI)(DX*1), SI + LEAQ 0(SI)(AX*2), AX + ADDQ $-48, AX + ADDQ $1, DX + MOVQ AX, SI + SHRQ CX, SI + TESTQ SI, SI JE LBB33_1 + MOVL DX, R11 -LBB33_6: +LBB33_7: MOVL 20(DI), DX - SUBL SI, DX - INCL DX - MOVQ $-1, R8 - SHLQ CX, R8 + SUBL R11, DX + ADDL $1, DX + MOVQ $-1, R9 + SHLQ CX, R9 MOVL DX, 20(DI) - NOTQ R8 + NOTQ R9 XORL R10, R10 - CMPL SI, R9 - JGE LBB33_9 - MOVLQSX SI, R9 + CMPL R11, R8 + JGE LBB33_10 + MOVLQSX R11, R8 MOVQ 0(DI), SI XORL R10, R10 -LBB33_8: +LBB33_9: MOVQ AX, DX SHRQ CX, DX - ANDQ R8, AX + ANDQ R9, AX ADDB $48, DX MOVB DX, 0(SI)(R10*1) - LEAQ 0(AX)(AX*4), AX MOVQ 0(DI), SI - LEAQ 0(SI)(R9*1), DX - MOVBQSX 0(R10)(DX*1), DX - LEAQ -48(DX)(AX*2), AX - MOVLQSX 16(DI), R11 - LEAQ 1(R9)(R10*1), DX - INCQ R10 - CMPQ DX, R11 - JL LBB33_8 - JMP LBB33_9 - -LBB33_11: + LEAQ 0(SI)(R8*1), DX + MOVBQSX 0(R10)(DX*1), R11 + LEAQ 1(R8)(R10*1), BX + ADDQ $1, R10 + LEAQ 0(AX)(AX*4), AX + LEAQ 0(R11)(AX*2), AX + ADDQ $-48, AX + MOVLQSX 16(DI), DX + CMPQ BX, DX + JL LBB33_9 + JMP LBB33_10 + +LBB33_12: ADDB $48, SI - MOVQ 0(DI), DX - MOVB SI, 
0(DX)(R9*1) - INCL R9 - MOVL R9, R10 + MOVQ 0(DI), BX + MOVB SI, 0(BX)(DX*1) + ADDL $1, DX + MOVL DX, R10 -LBB33_14: +LBB33_15: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX -LBB33_9: +LBB33_10: TESTQ AX, AX - JE LBB33_15 + JE LBB33_16 MOVQ AX, SI SHRQ CX, SI - ANDQ R8, AX - MOVLQSX R10, R9 - CMPQ 8(DI), R9 - JA LBB33_11 + ANDQ R9, AX + MOVLQSX R10, DX + CMPQ 8(DI), DX + JA LBB33_12 TESTQ SI, SI - JE LBB33_14 + JE LBB33_15 MOVL $1, 28(DI) - JMP LBB33_14 + JMP LBB33_15 -LBB33_15: +LBB33_16: MOVL R10, 16(DI) TESTL R10, R10 - JLE LBB33_19 + JLE LBB33_20 MOVQ 0(DI), AX - MOVL R10, R10 - -LBB33_17: - CMPB -1(AX)(R10*1), $48 - JNE LBB33_21 - MOVL R10, CX - DECQ R10 - DECL CX - MOVL CX, 16(DI) - LEAQ 1(R10), CX + MOVL R10, CX + ADDQ $1, CX + ADDL $-1, R10 + +LBB33_18: + MOVL R10, DX + CMPB 0(AX)(DX*1), $48 + JNE LBB33_22 + MOVL R10, 16(DI) + ADDQ $-1, CX + ADDL $-1, R10 CMPQ CX, $1 - JG LBB33_17 - -LBB33_19: - TESTL R10, R10 - JE LBB33_20 - -LBB33_21: - BYTE $0x5d // popq %rbp - RET + JG LBB33_18 + JMP LBB33_21 LBB33_2: TESTQ AX, AX - JE LBB33_22 + JE LBB33_23 MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX - JNE LBB33_6 + JNE LBB33_7 LBB33_4: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX - INCL SI + ADDL $1, R11 MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX JE LBB33_4 - JMP LBB33_6 + JMP LBB33_7 LBB33_20: + JE LBB33_21 + +LBB33_22: + BYTE $0x5b // popq %rbx + BYTE $0x5d // popq %rbp + RET + +LBB33_21: MOVL $0, 20(DI) + BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB33_22: +LBB33_23: MOVL $0, 16(DI) + BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -10708,6 +10889,25 @@ __DoubleQuoteTab: QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +__EscTab: + QUAD $0x0101010101010101; QUAD $0x0101010101010101 // .ascii 16, '\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01' + QUAD $0x0101010101010101; QUAD $0x0101010101010101 // .ascii 16, '\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01' + QUAD $0x0000000000010000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; LONG $0x00000000; BYTE $0x01 // .ascii 13, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, 
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + WORD $0x0000; BYTE $0x00 // .space 3, '\x00\x00\x00' + __UnquoteTab: QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' @@ -11480,29 +11680,29 @@ _LSHIFT_TAB: LONG $0x00000000 // .asciz 4, '\x00\x00\x00\x00' _P10_TAB: - QUAD $0x3ff0000000000000 // .quad 4607182418800017408 - QUAD $0x4024000000000000 // .quad 4621819117588971520 - QUAD $0x4059000000000000 // .quad 4636737291354636288 - QUAD $0x408f400000000000 // .quad 4652007308841189376 - QUAD $0x40c3880000000000 // .quad 4666723172467343360 - QUAD $0x40f86a0000000000 // .quad 4681608360884174848 - QUAD $0x412e848000000000 // .quad 4696837146684686336 - QUAD $0x416312d000000000 // .quad 4711630319722168320 - QUAD $0x4197d78400000000 // .quad 4726483295884279808 - QUAD $0x41cdcd6500000000 // .quad 4741671816366391296 - QUAD $0x4202a05f20000000 // .quad 4756540486875873280 - QUAD $0x42374876e8000000 // .quad 4771362005757984768 - QUAD $0x426d1a94a2000000 // .quad 4786511204640096256 - QUAD $0x42a2309ce5400000 // .quad 4801453603149578240 - QUAD $0x42d6bcc41e900000 // .quad 4816244402031689728 - QUAD $0x430c6bf526340000 // .quad 4831355200913801216 - QUAD $0x4341c37937e08000 // .quad 4846369599423283200 - QUAD $0x4376345785d8a000 // .quad 4861130398305394688 - QUAD $0x43abc16d674ec800 // .quad 4876203697187506176 - QUAD $0x43e158e460913d00 // .quad 4891288408196988160 - QUAD $0x4415af1d78b58c40 // .quad 4906019910204099648 - QUAD $0x444b1ae4d6e2ef50 // .quad 4921056587992461136 - QUAD $0x4480f0cf064dd592 // .quad 4936209963552724370 + QUAD $0x3ff0000000000000 // .quad 0x3ff0000000000000 + QUAD $0x4024000000000000 // .quad 0x4024000000000000 + QUAD $0x4059000000000000 // .quad 0x4059000000000000 + QUAD $0x408f400000000000 // .quad 0x408f400000000000 + QUAD $0x40c3880000000000 // .quad 0x40c3880000000000 + QUAD $0x40f86a0000000000 // .quad 0x40f86a0000000000 + QUAD $0x412e848000000000 // .quad 0x412e848000000000 + QUAD $0x416312d000000000 // .quad 0x416312d000000000 + QUAD $0x4197d78400000000 // .quad 0x4197d78400000000 + QUAD $0x41cdcd6500000000 // .quad 0x41cdcd6500000000 + QUAD $0x4202a05f20000000 // .quad 0x4202a05f20000000 + QUAD $0x42374876e8000000 // .quad 0x42374876e8000000 + QUAD $0x426d1a94a2000000 // .quad 0x426d1a94a2000000 + QUAD $0x42a2309ce5400000 // .quad 0x42a2309ce5400000 + QUAD $0x42d6bcc41e900000 // .quad 0x42d6bcc41e900000 + QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 + QUAD $0x4341c37937e08000 // .quad 0x4341c37937e08000 + QUAD $0x4376345785d8a000 // .quad 0x4376345785d8a000 + QUAD $0x43abc16d674ec800 // .quad 0x43abc16d674ec800 + QUAD $0x43e158e460913d00 // .quad 0x43e158e460913d00 + QUAD $0x4415af1d78b58c40 // .quad 0x4415af1d78b58c40 + QUAD $0x444b1ae4d6e2ef50 // .quad 0x444b1ae4d6e2ef50 + QUAD $0x4480f0cf064dd592 // .quad 0x4480f0cf064dd592 _first: QUAD $0xf0f0f0f0f0f0f0f0; QUAD $0xf0f0f0f0f0f0f0f0 // .ascii 16, 
'\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0' @@ -11539,14 +11739,14 @@ TEXT ·__f64toa(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -120(SP), R12 + LEAQ -136(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow _f64toa: MOVQ out+0(FP), DI MOVSD val+8(FP), X0 - CALL ·__native_entry__+630(SB) // _f64toa + CALL ·__native_entry__+570(SB) // _f64toa MOVQ AX, ret+16(FP) RET @@ -11559,7 +11759,7 @@ TEXT ·__html_escape(SB), NOSPLIT | NOFRAME, $0 - 40 _entry: MOVQ (TLS), R14 - LEAQ -72(SP), R12 + LEAQ -64(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11568,7 +11768,7 @@ _html_escape: MOVQ nb+8(FP), SI MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX - CALL ·__native_entry__+8160(SB) // _html_escape + CALL ·__native_entry__+9062(SB) // _html_escape MOVQ AX, ret+32(FP) RET @@ -11588,7 +11788,7 @@ _entry: _i64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+3642(SB) // _i64toa + CALL ·__native_entry__+3205(SB) // _i64toa MOVQ AX, ret+16(FP) RET @@ -11609,7 +11809,7 @@ _lspace: MOVQ sp+0(FP), DI MOVQ nb+8(FP), SI MOVQ off+16(FP), DX - CALL ·__native_entry__+301(SB) // _lspace + CALL ·__native_entry__+251(SB) // _lspace MOVQ AX, ret+24(FP) RET @@ -11652,7 +11852,7 @@ _quote: MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+4955(SB) // _quote + CALL ·__native_entry__+4498(SB) // _quote MOVQ AX, ret+40(FP) RET @@ -11665,7 +11865,7 @@ TEXT ·__skip_array(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -144(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11673,7 +11873,7 @@ _skip_array: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+17551(SB) // _skip_array + CALL ·__native_entry__+18295(SB) // _skip_array MOVQ AX, ret+24(FP) RET @@ -11686,14 +11886,14 @@ TEXT ·__skip_number(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -96(SP), R12 + LEAQ -80(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow _skip_number: MOVQ s+0(FP), DI MOVQ p+8(FP), SI - CALL ·__native_entry__+20669(SB) // _skip_number + CALL ·__native_entry__+21246(SB) // _skip_number MOVQ AX, ret+16(FP) RET @@ -11706,7 +11906,7 @@ TEXT ·__skip_object(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -144(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11714,7 +11914,7 @@ _skip_object: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+17588(SB) // _skip_object + CALL ·__native_entry__+18332(SB) // _skip_object MOVQ AX, ret+24(FP) RET @@ -11727,7 +11927,7 @@ TEXT ·__skip_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -144(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11735,7 +11935,7 @@ _skip_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+15699(SB) // _skip_one + CALL ·__native_entry__+16448(SB) // _skip_one MOVQ AX, ret+24(FP) RET @@ -11755,7 +11955,7 @@ _entry: _u64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+3735(SB) // _u64toa + CALL ·__native_entry__+3300(SB) // _u64toa MOVQ AX, ret+16(FP) RET @@ -11768,7 +11968,7 @@ TEXT ·__unquote(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -88(SP), R12 + LEAQ -72(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11778,7 +11978,7 @@ _unquote: MOVQ dp+16(FP), DX MOVQ ep+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+6005(SB) // _unquote + CALL ·__native_entry__+6037(SB) // _unquote MOVQ AX, ret+40(FP) RET @@ -11791,7 +11991,7 @@ TEXT ·__validate_one(SB), NOSPLIT | NOFRAME, $0 - 32 
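The trampoline stubs in this region all share one prologue, visible immediately below: load the current goroutine's g from TLS, compare SP minus the stub's frame size against the stack guard stored at offset 16 of g, and branch to _stack_grow when the frame would not fit; the regenerated _subr__* entry offsets and _stack__* frame sizes for these stubs follow in native_subr_amd64.go. Separately, the __EscTab table added earlier in this file is the "cache-friendly bool array" from this patch: 256 one-byte flags, set for exactly the bytes a JSON string must escape (controls 0x00-0x1F, '"' at 0x22, backslash at 0x5C), so the scalar tail of quote can test each byte with a single table load instead of a chain of compares. A rough Go sketch of that table-driven scan, under assumed names (escTab and firstEscape are illustrative, not sonic's API):

    package main

    import "fmt"

    // escTab mirrors the layout of __EscTab: nonzero iff the byte must
    // be escaped inside a JSON string.
    var escTab = func() (t [256]byte) {
        for c := 0; c < 0x20; c++ {
            t[c] = 1 // control characters
        }
        t['"'] = 1
        t['\\'] = 1
        return
    }()

    // firstEscape returns the index of the first byte of s that needs
    // escaping, or len(s) if none does: one table load per byte, with
    // no per-character compare chain.
    func firstEscape(s []byte) int {
        for i, c := range s {
            if escTab[c] != 0 {
                return i
            }
        }
        return len(s)
    }

    func main() {
        fmt.Println(firstEscape([]byte("plain text"))) // 10: nothing to escape
        fmt.Println(firstEscape([]byte("say \"hi\""))) // 4: the inner quote
    }

The whole table spans four 64-byte cache lines, which is presumably the "cache-friendly" part of the commit message.
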
_entry: MOVQ (TLS), R14 - LEAQ -160(SP), R12 + LEAQ -144(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11799,7 +11999,7 @@ _validate_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+20786(SB) // _validate_one + CALL ·__native_entry__+21363(SB) // _validate_one MOVQ AX, ret+24(FP) RET @@ -11822,7 +12022,7 @@ _value: MOVQ p+16(FP), DX MOVQ v+24(FP), CX MOVQ allow_control+32(FP), R8 - CALL ·__native_entry__+10880(SB) // _value + CALL ·__native_entry__+11651(SB) // _value MOVQ AX, ret+40(FP) RET @@ -11843,7 +12043,7 @@ _vnumber: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+13857(SB), AX // _vnumber + LEAQ ·__native_entry__+14561(SB), AX // _vnumber JMP AX _stack_grow: @@ -11863,7 +12063,7 @@ _vsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+15171(SB), AX // _vsigned + LEAQ ·__native_entry__+15866(SB), AX // _vsigned JMP AX _stack_grow: @@ -11875,7 +12075,7 @@ TEXT ·__vstring(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -128(SP), R12 + LEAQ -120(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11883,7 +12083,7 @@ _vstring: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+12822(SB), AX // _vstring + LEAQ ·__native_entry__+13543(SB), AX // _vstring JMP AX _stack_grow: @@ -11895,7 +12095,7 @@ TEXT ·__vunsigned(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -8(SP), R12 + LEAQ -24(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -11903,7 +12103,7 @@ _vunsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+15430(SB), AX // _vunsigned + LEAQ ·__native_entry__+16146(SB), AX // _vunsigned JMP AX _stack_grow: diff --git a/internal/native/avx/native_subr_amd64.go b/internal/native/avx/native_subr_amd64.go index 56d23be15..585aa6f88 100644 --- a/internal/native/avx/native_subr_amd64.go +++ b/internal/native/avx/native_subr_amd64.go @@ -9,45 +9,45 @@ package avx func __native_entry__() uintptr var ( - _subr__f64toa = __native_entry__() + 630 - _subr__html_escape = __native_entry__() + 8160 - _subr__i64toa = __native_entry__() + 3642 - _subr__lspace = __native_entry__() + 301 + _subr__f64toa = __native_entry__() + 570 + _subr__html_escape = __native_entry__() + 9062 + _subr__i64toa = __native_entry__() + 3205 + _subr__lspace = __native_entry__() + 251 _subr__lzero = __native_entry__() + 13 - _subr__quote = __native_entry__() + 4955 - _subr__skip_array = __native_entry__() + 17551 - _subr__skip_number = __native_entry__() + 20669 - _subr__skip_object = __native_entry__() + 17588 - _subr__skip_one = __native_entry__() + 15699 - _subr__u64toa = __native_entry__() + 3735 - _subr__unquote = __native_entry__() + 6005 - _subr__validate_one = __native_entry__() + 20786 - _subr__value = __native_entry__() + 10880 - _subr__vnumber = __native_entry__() + 13857 - _subr__vsigned = __native_entry__() + 15171 - _subr__vstring = __native_entry__() + 12822 - _subr__vunsigned = __native_entry__() + 15430 + _subr__quote = __native_entry__() + 4498 + _subr__skip_array = __native_entry__() + 18295 + _subr__skip_number = __native_entry__() + 21246 + _subr__skip_object = __native_entry__() + 18332 + _subr__skip_one = __native_entry__() + 16448 + _subr__u64toa = __native_entry__() + 3300 + _subr__unquote = __native_entry__() + 6037 + _subr__validate_one = __native_entry__() + 21363 + _subr__value = __native_entry__() + 11651 + _subr__vnumber = __native_entry__() + 14561 + _subr__vsigned = __native_entry__() + 15866 + _subr__vstring = 
__native_entry__() + 13543 + _subr__vunsigned = __native_entry__() + 16146 ) const ( - _stack__f64toa = 120 - _stack__html_escape = 72 + _stack__f64toa = 136 + _stack__html_escape = 64 _stack__i64toa = 24 _stack__lspace = 8 _stack__lzero = 8 _stack__quote = 80 - _stack__skip_array = 160 - _stack__skip_number = 96 - _stack__skip_object = 160 - _stack__skip_one = 160 + _stack__skip_array = 144 + _stack__skip_number = 80 + _stack__skip_object = 144 + _stack__skip_one = 144 _stack__u64toa = 8 - _stack__unquote = 88 - _stack__validate_one = 160 + _stack__unquote = 72 + _stack__validate_one = 144 _stack__value = 416 _stack__vnumber = 312 _stack__vsigned = 16 - _stack__vstring = 128 - _stack__vunsigned = 8 + _stack__vstring = 120 + _stack__vunsigned = 24 ) var ( diff --git a/internal/native/avx2/native_amd64.s b/internal/native/avx2/native_amd64.s index 48c421a94..d7a876233 100644 --- a/internal/native/avx2/native_amd64.s +++ b/internal/native/avx2/native_amd64.s @@ -15,89 +15,75 @@ _lzero: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp CMPQ SI, $32 - JB LBB0_5 - LEAQ -32(SI), AX - MOVQ AX, CX - ANDQ $-32, CX - LEAQ 32(CX)(DI*1), CX - ANDL $31, AX - -LBB0_2: + JB LBB0_3 + +LBB0_1: LONG $0x076ffec5 // vmovdqu (%rdi), %ymm0 LONG $0x177de2c4; BYTE $0xc0 // vptest %ymm0, %ymm0 - JNE LBB0_13 + JNE LBB0_9 ADDQ $32, DI ADDQ $-32, SI CMPQ SI, $31 - JA LBB0_2 - MOVQ AX, SI - MOVQ CX, DI + JA LBB0_1 -LBB0_5: +LBB0_3: WORD $0xf8c5; BYTE $0x77 // vzeroupper CMPQ SI, $16 - JB LBB0_10 - LEAQ -16(SI), AX - MOVQ AX, CX - ANDQ $-16, CX - LEAQ 16(CX)(DI*1), CX - ANDL $15, AX - -LBB0_7: + JB LBB0_6 + +LBB0_4: LONG $0x076ffac5 // vmovdqu (%rdi), %xmm0 LONG $0x1779e2c4; BYTE $0xc0 // vptest %xmm0, %xmm0 - JNE LBB0_14 + JNE LBB0_10 ADDQ $16, DI ADDQ $-16, SI CMPQ SI, $15 - JA LBB0_7 - MOVQ AX, SI - MOVQ CX, DI + JA LBB0_4 -LBB0_10: +LBB0_6: CMPQ SI, $8 - JB LBB0_16 + JB LBB0_12 MOVL $1, AX CMPQ 0(DI), $0 - JNE LBB0_12 + JNE LBB0_8 ADDQ $8, DI ADDQ $-8, SI -LBB0_16: +LBB0_12: CMPQ SI, $4 - JB LBB0_19 + JB LBB0_15 MOVL $1, AX CMPL 0(DI), $0 - JNE LBB0_12 + JNE LBB0_8 ADDQ $4, DI ADDQ $-4, SI -LBB0_19: +LBB0_15: CMPQ SI, $2 - JB LBB0_22 + JB LBB0_18 MOVL $1, AX CMPW 0(DI), $0 - JNE LBB0_12 + JNE LBB0_8 ADDQ $2, DI ADDQ $-2, SI -LBB0_22: +LBB0_18: XORL AX, AX TESTQ SI, SI - JE LBB0_12 + JE LBB0_8 CMPB 0(DI), $0 SETNE AX BYTE $0x5d // popq %rbp RET -LBB0_12: +LBB0_8: BYTE $0x5d // popq %rbp RET -LBB0_13: +LBB0_9: WORD $0xf8c5; BYTE $0x77 // vzeroupper -LBB0_14: +LBB0_10: MOVL $1, AX BYTE $0x5d // popq %rbp RET @@ -133,154 +119,143 @@ LCPI1_7: _lspace: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - LEAQ 0(DI)(DX*1), AX - SUBQ DX, SI + MOVQ DX, AX + LEAQ 0(DI)(DX*1), R10 + MOVQ SI, DX + SUBQ AX, DX JE LBB1_7 - MOVL AX, CX + MOVL R10, CX ANDL $31, CX TESTQ CX, CX JE LBB1_7 - LEAQ -1(SI), R9 - XORL DX, DX + LEAQ 1(DI), R9 + LEAQ -1(SI), CX MOVQ $4294977024, R8 LBB1_3: - MOVBLSX 0(AX)(DX*1), CX - CMPL CX, $32 - JA LBB1_5 - BTQ CX, R8 - JAE LBB1_5 - LEAQ 1(DX), R10 - CMPQ R9, DX + MOVBLSX 0(DI)(AX*1), DX + CMPL DX, $32 + JA LBB1_27 + BTQ DX, R8 + JAE LBB1_27 + LEAQ 1(AX), DX + CMPQ CX, AX JE LBB1_6 - LEAQ 1(AX)(DX*1), CX - ANDL $31, CX - MOVQ R10, DX - TESTQ CX, CX + ADDL R9, AX + ANDL $31, AX + TESTQ AX, AX + MOVQ DX, AX JNE LBB1_3 LBB1_6: - ADDQ R10, AX - SUBQ R10, SI + LEAQ 0(DI)(DX*1), R10 + SUBQ DX, SI + MOVQ SI, DX LBB1_7: - CMPQ SI, $32 - JB LBB1_15 - LEAQ -32(SI), CX - MOVQ CX, DX - ANDQ $-32, DX - LEAQ 32(DX)(AX*1), R8 - ANDL $31, CX - QUAD $0xfffffec2056ffec5 // vmovdqu 
$-318(%rip), %ymm0 /* LCPI1_0(%rip) */ - QUAD $0xfffffeda0d6ffec5 // vmovdqu $-294(%rip), %ymm1 /* LCPI1_1(%rip) */ - QUAD $0xfffffef2156ffec5 // vmovdqu $-270(%rip), %ymm2 /* LCPI1_2(%rip) */ - QUAD $0xffffff0a1d6ffec5 // vmovdqu $-246(%rip), %ymm3 /* LCPI1_3(%rip) */ + CMPQ DX, $32 + JB LBB1_13 + MOVQ DI, SI + SUBQ R10, SI + QUAD $0xfffffec4056ffec5 // vmovdqu $-316(%rip), %ymm0 /* LCPI1_0(%rip) */ + QUAD $0xfffffedc0d6ffec5 // vmovdqu $-292(%rip), %ymm1 /* LCPI1_1(%rip) */ + QUAD $0xfffffef4156ffec5 // vmovdqu $-268(%rip), %ymm2 /* LCPI1_2(%rip) */ + QUAD $0xffffff0c1d6ffec5 // vmovdqu $-244(%rip), %ymm3 /* LCPI1_3(%rip) */ LBB1_9: - LONG $0x206ffdc5 // vmovdqa (%rax), %ymm4 - LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 - LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 - LONG $0xeeebd5c5 // vpor %ymm6, %ymm5, %ymm5 - LONG $0xf274ddc5 // vpcmpeqb %ymm2, %ymm4, %ymm6 - LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 - LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 - LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 - LONG $0xd4d7fdc5 // vpmovmskb %ymm4, %edx - CMPL DX, $-1 + LONG $0x6f7dc1c4; BYTE $0x22 // vmovdqa (%r10), %ymm4 + LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 + LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 + LONG $0xeeebd5c5 // vpor %ymm6, %ymm5, %ymm5 + LONG $0xf274ddc5 // vpcmpeqb %ymm2, %ymm4, %ymm6 + LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 + LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 + LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 + LONG $0xc4d7fdc5 // vpmovmskb %ymm4, %eax + CMPL AX, $-1 JNE LBB1_10 - ADDQ $32, AX + ADDQ $32, R10 + ADDQ $-32, DX ADDQ $-32, SI - CMPQ SI, $31 + CMPQ DX, $31 JA LBB1_9 - MOVQ CX, SI - MOVQ R8, AX -LBB1_15: +LBB1_13: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ SI, $16 - JB LBB1_19 - LEAQ -16(SI), CX - MOVQ CX, DX - ANDQ $-16, DX - LEAQ 16(DX)(AX*1), R8 - ANDL $15, CX - QUAD $0xfffffec5056ffac5 // vmovdqu $-315(%rip), %xmm0 /* LCPI1_4(%rip) */ - QUAD $0xfffffecd0d6ffac5 // vmovdqu $-307(%rip), %xmm1 /* LCPI1_5(%rip) */ - QUAD $0xfffffed5156ffac5 // vmovdqu $-299(%rip), %xmm2 /* LCPI1_6(%rip) */ - QUAD $0xfffffedd1d6ffac5 // vmovdqu $-291(%rip), %xmm3 /* LCPI1_7(%rip) */ - -LBB1_17: - LONG $0x206ff9c5 // vmovdqa (%rax), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 - LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 - LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 - LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 - LONG $0xd4d7f9c5 // vpmovmskb %xmm4, %edx - CMPW DX, $-1 - JNE LBB1_18 - ADDQ $16, AX + CMPQ DX, $16 + JB LBB1_18 + MOVQ DI, SI + SUBQ R10, SI + QUAD $0xfffffed5056ffac5 // vmovdqu $-299(%rip), %xmm0 /* LCPI1_4(%rip) */ + QUAD $0xfffffedd0d6ffac5 // vmovdqu $-291(%rip), %xmm1 /* LCPI1_5(%rip) */ + QUAD $0xfffffee5156ffac5 // vmovdqu $-283(%rip), %xmm2 /* LCPI1_6(%rip) */ + QUAD $0xfffffeed1d6ffac5 // vmovdqu $-275(%rip), %xmm3 /* LCPI1_7(%rip) */ + +LBB1_15: + LONG $0x6f79c1c4; BYTE $0x22 // vmovdqa (%r10), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 + LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 + LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 + LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 + LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax + CMPW AX, $-1 + JNE LBB1_16 + ADDQ $16, R10 + ADDQ $-16, DX ADDQ 
$-16, SI - CMPQ SI, $15 - JA LBB1_17 - MOVQ CX, SI - MOVQ R8, AX + CMPQ DX, $15 + JA LBB1_15 -LBB1_19: - TESTQ SI, SI - JE LBB1_28 - LEAQ 0(AX)(SI*1), R8 - INCQ AX - MOVQ $4294977024, DX - -LBB1_21: - MOVBLSX -1(AX), CX - CMPL CX, $32 - JA LBB1_23 - BTQ CX, DX - JAE LBB1_23 - DECQ SI - INCQ AX - TESTQ SI, SI - JNE LBB1_21 - MOVQ R8, AX - JMP LBB1_28 +LBB1_18: + TESTQ DX, DX + JE LBB1_25 + LEAQ 0(R10)(DX*1), R8 + XORL AX, AX + MOVQ $4294977024, R9 -LBB1_10: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - SUBQ DI, AX - NOTL DX - MOVLQSX DX, CX - BSFQ CX, CX - ADDQ CX, AX - BYTE $0x5d // popq %rbp - RET +LBB1_20: + MOVBLSX 0(R10)(AX*1), SI + CMPL SI, $32 + JA LBB1_22 + BTQ SI, R9 + JAE LBB1_22 + ADDQ $1, AX + CMPQ DX, AX + JNE LBB1_20 + MOVQ R8, R10 -LBB1_5: - ADDQ DX, AX +LBB1_25: + SUBQ DI, R10 -LBB1_28: - SUBQ DI, AX - BYTE $0x5d // popq %rbp - RET +LBB1_26: + MOVQ R10, AX -LBB1_18: - MOVWLZX DX, CX - SUBQ DI, AX - NOTL CX - BSFL CX, CX - ADDQ CX, AX - BYTE $0x5d // popq %rbp +LBB1_27: + BYTE $0x5d // popq %rbp RET -LBB1_23: - NOTQ DI - ADDQ DI, AX +LBB1_10: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + +LBB1_11: + NOTL AX + BSFL AX, AX + SUBQ SI, AX BYTE $0x5d // popq %rbp RET +LBB1_16: + MOVWLZX AX, AX + JMP LBB1_11 + +LBB1_22: + SUBQ DI, R10 + ADDQ AX, R10 + JMP LBB1_26 + LCPI2_0: QUAD $0x3030303030303030; QUAD $0x3030303030303030 // .space 16, '0000000000000000' QUAD $0x3030303030303030; QUAD $0x3030303030303030 // .space 16, '0000000000000000' @@ -293,585 +268,570 @@ _f64toa: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVQ DI, R13 + SUBQ $56, SP + MOVQ DI, R12 LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax XORL R9, R9 TESTQ AX, AX - JS LBB2_108 + JS LBB2_1 TESTQ AX, AX - JE LBB2_109 + JE LBB2_3 -LBB2_2: +LBB2_4: MOVQ AX, DX SHRQ $52, DX XORL BX, BX CMPL DX, $2047 - JE LBB2_107 + JE LBB2_105 MOVQ $4503599627370495, DI ANDQ DI, AX - INCQ DI + ADDQ $1, DI ORQ AX, DI LEAL -1023(DX), CX CMPL CX, $52 MOVL R9, -44(BP) - MOVQ R13, -64(BP) - JA LBB2_5 + MOVQ R12, -56(BP) + JA LBB2_8 MOVL $1075, CX SUBQ DX, CX MOVQ $-1, SI SHLQ CX, SI NOTQ SI TESTQ SI, DI - JE LBB2_14 + JE LBB2_7 -LBB2_5: +LBB2_8: TESTL DX, DX LONG $0xf8440f48 // cmoveq %rax, %rdi LEAL -1077(DX), CX - MOVL $-1076, R11 - LONG $0xd9450f44 // cmovnel %ecx, %r11d - MOVQ DI, -72(BP) - LEAQ 0(DI*4), R8 + MOVL $-1076, BX + WORD $0x450f; BYTE $0xd9 // cmovnel %ecx, %ebx + MOVQ DI, -64(BP) + LEAQ 0(DI*4), R10 TESTQ AX, AX SETNE AX CMPL DX, $2 SETCS R13 ORB AX, R13 - MOVBLZX R13, R9 - TESTL R11, R11 - JS LBB2_12 - LONG $0x41e36945; WORD $0x0134; BYTE $0x00 // imull $78913, %r11d, %r12d - SHRL $18, R12 + MOVBLZX R13, R15 + TESTL BX, BX + JS LBB2_22 + LONG $0x41eb6944; WORD $0x0134; BYTE $0x00 // imull $78913, %ebx, %r13d + SHRL $18, R13 XORL AX, AX - CMPL R11, $3 - SETGT AX - SUBL AX, R12 - LONG $0x4fdc6941; WORD $0x1293; BYTE $0x00 // imull $1217359, %r12d, %ebx - MOVQ R12, AX + CMPL BX, $4 + SETGE AX + SUBL AX, R13 + LONG $0x4ff56941; WORD $0x1293; BYTE $0x00 // imull $1217359, %r13d, %esi + MOVQ R13, AX SHLQ $4, AX - LONG $0x230d8d48; WORD $0x008f; BYTE $0x00 // leaq $36643(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ - MOVQ R8, DI - ORQ $2, DI - MOVQ 0(AX)(CX*1), R10 + LONG $0x470d8d48; WORD $0x008e; BYTE $0x00 // leaq $36423(%rip), %rcx /* _DOUBLE_POW5_INV_SPLIT(%rip) */ + MOVQ R10, R12 + ORQ $2, R12 + MOVQ 0(AX)(CX*1), R11 MOVQ 8(AX)(CX*1), R14 - MOVQ R10, AX - MULQ DI + MOVQ R11, AX + MULQ R12 MOVQ DX, CX MOVQ R14, AX - MOVQ R14, -56(BP) - MULQ DI - MOVQ AX, R13 - MOVQ DX, SI - SHRL 
$19, BX - ADDQ CX, R13 - ADCQ $0, SI - MOVL R12, CX - SUBL R11, CX - ADDL BX, CX + MULQ R12 + MOVQ AX, R8 + MOVQ DX, DI + SHRL $19, SI + ADDQ CX, R8 + ADCQ $0, DI + MOVL R13, CX + SUBL BX, CX + ADDL SI, CX ADDB $61, CX - LONG $0xf5ad0f49 // shrdq %cl, %rsi, %r13 - SHRQ CX, SI - NOTQ R9 - ADDQ R8, R9 - MOVQ R10, AX - MULQ R9 - MOVQ DX, R15 + LONG $0xf8ad0f49 // shrdq %cl, %rdi, %r8 + SHRQ CX, DI + NOTQ R15 + ADDQ R10, R15 + MOVQ R11, AX + MULQ R15 + MOVQ DX, BX + MOVQ R14, AX + MULQ R15 + MOVQ DX, R9 + MOVQ AX, SI + ADDQ BX, SI + ADCQ $0, R9 + LONG $0xcead0f4c // shrdq %cl, %r9, %rsi + SHRQ CX, R9 + MOVQ R11, AX + MULQ R10 + MOVQ DX, R11 MOVQ R14, AX - MULQ R9 + MULQ R10 MOVQ DX, R14 - MOVQ AX, BX - ADDQ R15, BX + ADDQ R11, AX ADCQ $0, R14 - LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + LONG $0xf0ad0f4c // shrdq %cl, %r14, %rax SHRQ CX, R14 - MOVQ R10, AX - MULQ R8 - MOVQ DX, R11 - MOVQ -56(BP), AX - MULQ R8 - MOVQ DX, R10 - ADDQ R11, AX - ADCQ $0, R10 - LONG $0xd0ad0f4c // shrdq %cl, %r10, %rax - SHRQ CX, R10 TESTB $64, CX - LONG $0xf5440f49 // cmoveq %r13, %rsi - LONG $0xf3440f4c // cmoveq %rbx, %r14 - LONG $0xd0440f4c // cmoveq %rax, %r10 - CMPL R12, $21 - JA LBB2_23 + LONG $0xf8440f49 // cmoveq %r8, %rdi + LONG $0xce440f4c // cmoveq %rsi, %r9 + LONG $0xf0440f4c // cmoveq %rax, %r14 + CMPL R13, $21 + JA LBB2_10 MOVQ $-3689348814741910323, AX MOVQ $3689348814741910323, CX - MOVQ R8, DX + MOVQ R10, DX IMULQ AX, DX CMPQ DX, CX - JBE LBB2_17 - TESTB $1, -72(BP) - JNE LBB2_20 + JBE LBB2_12 + TESTB $1, -64(BP) + JNE LBB2_16 MOVL $-1, DX -LBB2_10: - IMULQ AX, R9 - INCL DX - CMPQ R9, CX - JBE LBB2_10 - CMPL DX, R12 - SETCC R13 +LBB2_20: + IMULQ AX, R15 + ADDL $1, DX + CMPQ R15, CX + JBE LBB2_20 + CMPL DX, R13 + SETCC R11 XORL R15, R15 - TESTB R15, R15 - JE LBB2_26 - JMP LBB2_29 + TESTB R11, R11 + JE LBB2_29 + JMP LBB2_30 -LBB2_12: - LONG $0x05e36945; WORD $0xf4d1; BYTE $0xff // imull $-732923, %r11d, %r12d - SHRL $20, R12 +LBB2_22: + LONG $0x05c36944; WORD $0xf4d1; BYTE $0xff // imull $-732923, %ebx, %r8d + SHRL $20, R8 XORL AX, AX - CMPL R11, $-1 + CMPL BX, $-1 SETNE AX - SUBL AX, R12 - ADDL R12, R11 - MOVL R11, AX + SUBL AX, R8 + ADDL R8, BX + MOVL BX, AX NEGL AX - LONG $0xb1db6941; WORD $0xed6c; BYTE $0xff // imull $-1217359, %r11d, %ebx - SHRL $19, BX - MOVLQSX AX, SI - SHLQ $4, SI - LONG $0x4e158d4c; WORD $0x00a3; BYTE $0x00 // leaq $41806(%rip), %r10 /* _DOUBLE_POW5_SPLIT(%rip) */ - MOVQ R8, DI - ORQ $2, DI - MOVQ 0(SI)(R10*1), R14 - MOVQ R14, AX - MOVQ R14, -56(BP) - MULQ DI - MOVQ DX, CX - MOVQ 8(SI)(R10*1), R10 - MOVQ R10, AX - MULQ DI + LONG $0x6cb1f369; WORD $0xffed // imull $-1217359, %ebx, %esi + SHRL $19, SI + MOVLQSX AX, DI + SHLQ $4, DI + LONG $0x7a1d8d4c; WORD $0x00a2; BYTE $0x00 // leaq $41594(%rip), %r11 /* _DOUBLE_POW5_SPLIT(%rip) */ + MOVQ R10, CX + ORQ $2, CX + MOVQ 0(DI)(R11*1), R9 + MOVQ R9, AX + MOVQ R9, -80(BP) + MULQ CX + MOVQ DX, R14 + MOVQ 8(DI)(R11*1), AX + MOVQ AX, -88(BP) + MULQ CX MOVQ DX, DI - MOVQ AX, SI - ADDQ CX, SI + MOVQ AX, R11 + ADDQ R14, R11 ADCQ $0, DI - MOVL R12, CX - SUBL BX, CX + MOVL R8, CX + SUBL SI, CX ADDB $60, CX - LONG $0xfead0f48 // shrdq %cl, %rdi, %rsi + LONG $0xfbad0f49 // shrdq %cl, %rdi, %r11 SHRQ CX, DI - NOTQ R9 - ADDQ R8, R9 + NOTQ R15 + ADDQ R10, R15 + MOVQ R9, AX + MULQ R15 + MOVQ DX, -72(BP) + MOVQ -88(BP), R14 MOVQ R14, AX - MULQ R9 + MULQ R15 + MOVQ DX, R9 + MOVQ AX, SI + ADDQ -72(BP), SI + ADCQ $0, R9 + LONG $0xcead0f4c // shrdq %cl, %r9, %rsi + SHRQ CX, R9 + MOVQ -80(BP), AX + MULQ R10 MOVQ DX, R15 - MOVQ R10, AX - MULQ R9 + MOVQ 
R14, AX + MULQ R10 MOVQ DX, R14 - MOVQ AX, BX - ADDQ R15, BX + ADDQ R15, AX ADCQ $0, R14 - LONG $0xf3ad0f4c // shrdq %cl, %r14, %rbx + LONG $0xf0ad0f4c // shrdq %cl, %r14, %rax SHRQ CX, R14 - MOVQ -56(BP), AX - MULQ R8 - MOVQ DX, R15 - MOVQ R10, AX - MULQ R8 - MOVQ DX, R10 - ADDQ R15, AX - ADCQ $0, R10 - LONG $0xd0ad0f4c // shrdq %cl, %r10, %rax - SHRQ CX, R10 TESTB $64, CX - LONG $0xfe440f48 // cmoveq %rsi, %rdi - MOVQ DI, SI - LONG $0xf3440f4c // cmoveq %rbx, %r14 - LONG $0xd0440f4c // cmoveq %rax, %r10 - CMPL R12, $1 - JA LBB2_15 - MOVQ -72(BP), AX + LONG $0xfb440f49 // cmoveq %r11, %rdi + LONG $0xce440f4c // cmoveq %rsi, %r9 + LONG $0xf0440f4c // cmoveq %rax, %r14 + CMPL R8, $1 + JA LBB2_24 + MOVQ -64(BP), AX ANDL $1, AX TESTQ AX, AX - SETEQ CX - ANDB CX, R13 - SUBQ AX, SI + SETEQ R11 + ANDB R13, R11 + SUBQ AX, DI MOVB $1, R15 - MOVL R11, AX - MOVQ AX, -56(BP) + MOVL BX, R13 JMP LBB2_30 -LBB2_14: +LBB2_7: MOVL CX, CX SHRQ CX, DI - XORL R12, R12 + XORL R13, R13 MOVL $1, R15 CMPQ DI, $10 - JAE LBB2_46 - JMP LBB2_61 + JAE LBB2_52 + JMP LBB2_67 -LBB2_15: - CMPL R12, $62 - JA LBB2_24 +LBB2_24: + CMPL R8, $62 + JA LBB2_25 MOVQ $-1, AX - MOVL R12, CX + MOVL R8, CX SHLQ CX, AX NOTQ AX - TESTQ AX, R8 + TESTQ AX, R10 SETEQ R15 - JMP LBB2_25 + JMP LBB2_27 -LBB2_17: +LBB2_12: MOVL $-1, DX -LBB2_18: - IMULQ AX, R8 - INCL DX - CMPQ R8, CX - JBE LBB2_18 - CMPL DX, R12 +LBB2_13: + IMULQ AX, R10 + ADDL $1, DX + CMPQ R10, CX + JBE LBB2_13 + CMPL DX, R13 SETCC R15 - XORL R13, R13 - TESTB R15, R15 - JE LBB2_26 - JMP LBB2_29 + XORL R11, R11 + TESTB R11, R11 + JE LBB2_29 + JMP LBB2_30 -LBB2_20: +LBB2_16: MOVL $-1, DX -LBB2_21: - IMULQ AX, DI - INCL DX - CMPQ DI, CX - JBE LBB2_21 - XORL AX, AX - CMPL DX, R12 - SETCC AX - SUBQ AX, SI +LBB2_17: + IMULQ AX, R12 + ADDL $1, DX + CMPQ R12, CX + JBE LBB2_17 + CMPL DX, R13 + ADCQ $-1, DI -LBB2_23: +LBB2_10: XORL R15, R15 - XORL R13, R13 - TESTB R15, R15 - JE LBB2_26 - JMP LBB2_29 + XORL R11, R11 + TESTB R11, R11 + JE LBB2_29 + JMP LBB2_30 -LBB2_24: +LBB2_25: XORL R15, R15 -LBB2_25: - XORL R13, R13 - MOVL R11, R12 - TESTB R15, R15 - JNE LBB2_29 +LBB2_27: + XORL R11, R11 + MOVL BX, R13 + TESTB R11, R11 + JNE LBB2_30 -LBB2_26: - TESTB R13, R13 - JNE LBB2_29 - MOVQ SI, BX - SHRQ $2, SI +LBB2_29: + TESTB R15, R15 + JNE LBB2_30 + MOVQ DI, AX + SHRQ $2, AX + MOVQ DI, BX MOVQ $2951479051793528259, DI - MOVQ SI, AX MULQ DI - MOVQ DX, CX - SHRQ $2, CX - MOVQ R14, AX + MOVQ DX, SI + MOVQ R9, AX SHRQ $2, AX MULQ DI - MOVQ DX, SI + MOVQ DX, CX SHRQ $2, SI - CMPQ CX, SI - JBE LBB2_39 - MOVQ R10, AX + SHRQ $2, CX + XORL R8, R8 + CMPQ SI, CX + JBE LBB2_44 + MOVQ R14, AX SHRQ $2, AX MULQ DI SHRQ $2, DX WORD $0xc26b; BYTE $0x9c // imull $-100, %edx, %eax - ADDL R10, AX - XORL DI, DI - CMPL AX, $49 - SETHI DI - MOVL $2, R11 - MOVQ SI, R14 - MOVQ CX, AX - MOVQ DX, R10 - MOVQ -64(BP), R13 - JMP LBB2_40 - -LBB2_29: - MOVQ R12, -56(BP) + ADDL R14, AX + CMPL AX, $50 + SETCC DI + MOVL $2, R8 + MOVQ DX, R14 + JMP LBB2_46 LBB2_30: - MOVQ $-3689348814741910323, BX - MOVQ SI, AX - MULQ BX - MOVQ DX, R12 - MOVQ R14, AX - MULQ BX - SHRQ $3, R12 + MOVQ $-3689348814741910323, R12 + MOVQ DI, AX + MULQ R12 + MOVQ DX, DI + MOVQ R9, AX + MULQ R12 + SHRQ $3, DI SHRQ $3, DX - XORL SI, SI - XORL R11, R11 - CMPQ R12, DX - JBE LBB2_37 XORL CX, CX + XORL R8, R8 + CMPQ DI, DX + JBE LBB2_31 + XORL BX, BX -LBB2_32: - MOVQ DX, R8 - LEAL 0(DX)(DX*1), DI - MOVQ R10, AX - MULQ BX - MOVQ DX, R9 - LEAL 0(DI)(DI*4), AX - SHRQ $3, R9 - LEAL 0(R9)(R9*1), DX - LEAL 0(DX)(DX*4), SI - NEGL SI - ADDB R10, SI - CMPL R14, AX 
+LBB2_33: + MOVQ DX, R10 + LEAL 0(DX)(DX*1), CX + MOVQ R14, AX + MULQ R12 + MOVQ DX, SI + LEAL 0(CX)(CX*4), AX + SHRQ $3, SI + LEAL 0(SI)(SI*1), CX + LEAL 0(CX)(CX*4), CX + NEGL CX + ADDB R14, CX + CMPL AX, R9 SETEQ AX - ANDB AX, R13 - TESTB CX, CX + ANDB AX, R11 + TESTB BX, BX SETEQ AX ANDB AX, R15 - INCL R11 - MOVQ R12, AX - MULQ BX - MOVQ DX, R12 - SHRQ $3, R12 - MOVQ R8, AX - MULQ BX + ADDL $1, R8 + MOVQ DI, AX + MULQ R12 + MOVQ DX, DI + SHRQ $3, DI + MOVQ R10, AX + MULQ R12 SHRQ $3, DX + MOVQ SI, R14 + MOVQ R10, R9 + MOVL CX, BX + CMPQ DI, DX + JA LBB2_33 + TESTB R11, R11 + JNE LBB2_36 + JMP LBB2_35 + +LBB2_31: MOVQ R9, R10 - MOVQ R8, R14 - MOVL SI, CX - CMPQ R12, DX - JA LBB2_32 - TESTB R13, R13 - JE LBB2_38 + MOVQ R14, SI + TESTB R11, R11 + JE LBB2_35 -LBB2_34: - MOVQ R8, AX - MULQ BX - MOVQ DX, CX - SHRQ $3, CX - LEAL 0(CX)(CX*1), AX +LBB2_36: + MOVQ R10, AX + MULQ R12 + MOVQ DX, DI + SHRQ $3, DI + LEAL 0(DI)(DI*1), AX LEAL 0(AX)(AX*4), AX - CMPL R8, AX - MOVQ -72(BP), DI - MOVQ -56(BP), R12 - JNE LBB2_44 - MOVQ BX, R14 + CMPL AX, R10 + JNE LBB2_35 -LBB2_36: - MOVQ R9, AX - MULQ R14 - MOVQ DX, R10 - MOVQ CX, R8 - SHRQ $3, R10 - LEAL 0(R10)(R10*1), AX +LBB2_37: + MOVQ SI, AX + MULQ R12 + MOVQ DX, R9 + MOVQ DI, R10 + SHRQ $3, R9 + LEAL 0(R9)(R9*1), AX LEAL 0(AX)(AX*4), BX NEGL BX - ADDB R9, BX - TESTB SI, SI + ADDB SI, BX + TESTB CX, CX SETEQ AX ANDB AX, R15 - INCL R11 - MOVQ CX, AX - MULQ R14 - MOVQ DX, CX - SHRQ $3, CX - LEAL 0(CX)(CX*1), AX + ADDL $1, R8 + MOVQ DI, AX + MULQ R12 + MOVQ DX, DI + SHRQ $3, DI + LEAL 0(DI)(DI*1), AX LEAL 0(AX)(AX*4), AX - MOVQ R10, R9 - MOVL BX, SI - CMPL R8, AX - JE LBB2_36 - JMP LBB2_45 + MOVQ R9, SI + MOVL BX, CX + CMPL AX, R10 + JE LBB2_37 + JMP LBB2_38 -LBB2_37: - MOVQ R14, R8 - MOVQ R10, R9 - TESTB R13, R13 - JNE LBB2_34 +LBB2_35: + MOVL CX, BX + MOVQ SI, R9 LBB2_38: - MOVL SI, BX - MOVQ R9, R10 - MOVQ -72(BP), DI - MOVQ -56(BP), R12 - JMP LBB2_45 + TESTB R15, R15 + SETEQ CX + CMPB BX, $5 + SETNE AX + CMPQ R9, R10 + MOVQ -56(BP), R12 + JNE LBB2_41 + MOVL $1, DI + TESTB $1, -64(BP) + JNE LBB2_42 + TESTB R11, R11 + JE LBB2_42 + +LBB2_41: + MOVL R9, DX + ANDB $1, DX + ORB DX, CX + ORB CX, AX + CMPB BX, $5 + SETCC CX + ANDB AX, CX + MOVBLZX CX, DI + +LBB2_42: + ADDQ R9, DI + JMP LBB2_50 -LBB2_39: - XORL R11, R11 +LBB2_44: XORL DI, DI - MOVQ -64(BP), R13 - MOVQ BX, AX + MOVQ R9, CX + MOVQ BX, SI -LBB2_40: - MOVQ $-3689348814741910323, BX - MULQ BX - MOVQ DX, CX - SHRQ $3, CX - MOVQ R14, AX - MULQ BX +LBB2_46: + MOVQ -56(BP), R12 + MOVQ $-3689348814741910323, R9 + MOVQ SI, AX + MULQ R9 + MOVQ DX, SI + SHRQ $3, SI + MOVQ CX, AX + MULQ R9 SHRQ $3, DX - CMPQ CX, DX - JBE LBB2_43 + CMPQ SI, DX + JBE LBB2_49 -LBB2_41: - MOVQ R10, SI - MOVQ DX, R14 - MOVQ R10, AX - MULQ BX - MOVQ DX, R10 - SHRQ $3, R10 - INCL R11 - MOVQ CX, AX - MULQ BX +LBB2_47: + MOVQ R14, DI MOVQ DX, CX - SHRQ $3, CX MOVQ R14, AX - MULQ BX - SHRQ $3, DX - CMPQ CX, DX - JA LBB2_41 - LEAL 0(R10)(R10*1), AX - LEAL 0(AX)(AX*4), AX - SUBL AX, SI - XORL DI, DI - CMPL SI, $4 - SETHI DI - -LBB2_43: - XORL AX, AX - CMPQ R10, R14 - SETEQ AX - ORQ DI, AX - ADDQ R10, AX - MOVQ AX, DI - ADDL R11, R12 - MOVL $1, R15 - CMPQ DI, $10 - JAE LBB2_46 - JMP LBB2_61 - -LBB2_44: - MOVL SI, BX - MOVQ R9, R10 - -LBB2_45: - TESTB R13, R13 - SETEQ SI - TESTB R15, R15 - SETNE AX - CMPB BX, $5 - SETEQ DX - TESTB $1, R10 - SETEQ CX - ANDB AX, CX - ANDB DX, CX - CMPQ R10, R8 - SETEQ DX - ORB SI, DI - CMPB BX, $4 - SETHI AX - XORB CX, AX - ANDB DX, DI - ORB AX, DI - MOVBLZX DI, DI - ADDQ R10, DI - MOVQ -64(BP), R13 - 
ADDL R11, R12 - MOVL $1, R15 - CMPQ DI, $10 - JB LBB2_61 + MULQ R9 + MOVQ DX, R14 + SHRQ $3, R14 + ADDL $1, R8 + MOVQ SI, AX + MULQ R9 + MOVQ DX, SI + SHRQ $3, SI + MOVQ CX, AX + MULQ R9 + SHRQ $3, DX + CMPQ SI, DX + JA LBB2_47 + LEAL 0(R14)(R14*1), AX + LEAL 0(AX)(AX*4), AX + SUBL AX, DI + CMPL DI, $5 + SETCC DI + +LBB2_49: + CMPQ R14, CX + SETEQ AX + TESTB DI, DI + SETNE CX + ORB AX, CX + MOVBLZX CX, DI + ADDQ R14, DI + +LBB2_50: + ADDL R8, R13 + MOVL $1, R15 + CMPQ DI, $10 + JB LBB2_67 -LBB2_46: +LBB2_52: MOVL $2, R15 CMPQ DI, $100 - JB LBB2_61 + JB LBB2_67 MOVL $3, R15 CMPQ DI, $1000 - JB LBB2_61 + JB LBB2_67 MOVL $4, R15 CMPQ DI, $10000 - JB LBB2_61 + JB LBB2_67 MOVL $5, R15 CMPQ DI, $100000 - JB LBB2_61 + JB LBB2_67 MOVL $6, R15 CMPQ DI, $1000000 - JB LBB2_61 + JB LBB2_67 MOVL $7, R15 CMPQ DI, $10000000 - JB LBB2_61 + JB LBB2_67 MOVL $8, R15 CMPQ DI, $100000000 - JB LBB2_61 + JB LBB2_67 MOVL $9, R15 CMPQ DI, $1000000000 - JB LBB2_61 + JB LBB2_67 MOVQ $8589934464, AX ADDQ $1410065536, AX MOVL $10, R15 CMPQ DI, AX - JB LBB2_61 + JB LBB2_67 MOVQ DI, AX SHRQ $11, AX MOVL $11, R15 CMPQ AX, $48828125 - JB LBB2_61 + JB LBB2_67 MOVQ DI, AX SHRQ $12, AX MOVL $12, R15 CMPQ AX, $244140625 - JB LBB2_61 + JB LBB2_67 MOVQ DI, AX SHRQ $13, AX MOVL $13, R15 CMPQ AX, $1220703125 - JB LBB2_61 + JB LBB2_67 MOVL $14, R15 MOVQ $100000000000000, AX CMPQ DI, AX - JB LBB2_61 + JB LBB2_67 MOVL $15, R15 MOVQ $1000000000000000, AX CMPQ DI, AX - JB LBB2_61 + JB LBB2_67 MOVQ $10000000000000000, AX CMPQ DI, AX MOVL $17, R15 SBBL $0, R15 -LBB2_61: - LEAL 0(R15)(R12*1), R14 - LEAL 5(R15)(R12*1), AX +LBB2_67: + LEAL 0(R15)(R13*1), R14 + LEAL 0(R15)(R13*1), AX + ADDL $5, AX CMPL AX, $27 - JB LBB2_67 - LEAQ 1(R13), BX + JB LBB2_78 + LEAQ 1(R12), BX MOVQ BX, SI MOVL R15, DX - LONG $0x0058b5e8; BYTE $0x00 // callq _print_mantissa - MOVB 1(R13), AX - MOVB AX, 0(R13) + LONG $0x0057cde8; BYTE $0x00 // callq _print_mantissa + MOVB 1(R12), AX + MOVB AX, 0(R12) MOVL $1, AX CMPL R15, $2 - JB LBB2_64 + JB LBB2_70 MOVB $46, 0(BX) - INCL R15 + ADDL $1, R15 MOVL R15, AX -LBB2_64: +LBB2_70: MOVL AX, BX - MOVB $101, 0(R13)(BX*1) - INCQ BX + MOVB $101, 0(R12)(BX*1) + ADDQ $1, BX TESTL R14, R14 - JLE LBB2_69 - DECL R14 + JLE LBB2_72 + ADDL $-1, R14 MOVL -44(BP), R9 CMPL R14, $100 - JL LBB2_70 + JL LBB2_75 -LBB2_66: +LBB2_74: MOVL R14, AX MOVL $3435973837, CX IMULQ AX, CX @@ -879,293 +839,230 @@ LBB2_66: LEAL 0(CX)(CX*1), AX LEAL 0(AX)(AX*4), AX SUBL AX, R14 - LONG $0x20058d48; WORD $0x00b2; BYTE $0x00 // leaq $45600(%rip), %rax /* _Digits(%rip) */ + LONG $0x8a058d48; WORD $0x00b1; BYTE $0x00 // leaq $45450(%rip), %rax /* _Digits(%rip) */ MOVWLZX 0(AX)(CX*2), AX MOVL BX, CX - MOVW AX, 0(R13)(CX*1) + MOVW AX, 0(R12)(CX*1) ORB $48, R14 - MOVLQSX BX, AX - MOVB R14, 2(AX)(R13*1) + MOVB R14, 2(R12)(CX*1) ADDL $3, BX - JMP LBB2_107 + JMP LBB2_105 -LBB2_67: +LBB2_78: TESTL R14, R14 - JLE LBB2_72 - MOVL R12, R13 - SARL $31, R13 - ANDL R14, R13 + JLE LBB2_82 + MOVL R13, R12 + SARL $31, R12 + ANDL R14, R12 XORL BX, BX - TESTL R12, R12 - LONG $0xe3480f44 // cmovsl %ebx, %r12d - JMP LBB2_74 + TESTL R13, R13 + LONG $0xeb480f44 // cmovsl %ebx, %r13d + JMP LBB2_80 -LBB2_69: +LBB2_72: ADDL $2, AX - MOVB $45, 0(R13)(BX*1) + MOVB $45, 0(R12)(BX*1) MOVL $1, CX SUBL R14, CX MOVL CX, R14 MOVL AX, BX MOVL -44(BP), R9 CMPL R14, $100 - JGE LBB2_66 + JGE LBB2_74 -LBB2_70: +LBB2_75: CMPL R14, $10 - JL LBB2_85 + JL LBB2_77 MOVLQSX R14, AX - LONG $0xb20d8d48; WORD $0x00b1; BYTE $0x00 // leaq $45490(%rip), %rcx /* _Digits(%rip) */ + LONG $0x210d8d48; WORD $0x00b1; 
BYTE $0x00 // leaq $45345(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVL BX, CX - MOVW AX, 0(R13)(CX*1) + MOVW AX, 0(R12)(CX*1) ADDL $2, BX - JMP LBB2_107 + JMP LBB2_105 -LBB2_72: - MOVW $11824, 0(R13) +LBB2_82: + MOVW $11824, 0(R12) TESTL R14, R14 - JS LBB2_86 - XORL R13, R13 - MOVL $2, BX + JS LBB2_84 XORL R12, R12 + MOVL $2, BX + XORL R13, R13 -LBB2_74: +LBB2_80: MOVL BX, SI - ADDQ -64(BP), SI + ADDQ -56(BP), SI MOVL R15, DX - LONG $0x0057b1e8; BYTE $0x00 // callq _print_mantissa - TESTL R13, R13 - JE LBB2_78 - LEAL 0(R13)(BX*1), AX - CMPL R13, R15 - JGE LBB2_80 + LONG $0x0056cce8; BYTE $0x00 // callq _print_mantissa + TESTL R12, R12 + JE LBB2_81 + LEAL 0(R12)(BX*1), AX + CMPL R12, R15 + JGE LBB2_88 LEAL 0(BX)(R15*1), CX MOVL AX, AX - MOVQ -64(BP), R10 + MOVQ -56(BP), R10 -LBB2_77: +LBB2_90: MOVBLZX -1(R10)(CX*1), DX MOVB DX, 0(R10)(CX*1) - DECQ CX - CMPQ CX, AX - JG LBB2_77 - JMP LBB2_81 + LEAQ -1(CX), DX + MOVQ DX, CX + CMPQ DX, AX + JG LBB2_90 + JMP LBB2_91 -LBB2_78: - MOVQ -64(BP), R10 +LBB2_81: + MOVQ -56(BP), R10 ADDL R15, BX - TESTL R12, R12 - JNE LBB2_82 - JMP LBB2_106 + TESTL R13, R13 + JNE LBB2_94 -LBB2_80: +LBB2_93: + MOVL -44(BP), R9 + JMP LBB2_105 + +LBB2_88: MOVL AX, AX - MOVQ -64(BP), R10 + MOVQ -56(BP), R10 -LBB2_81: +LBB2_91: MOVB $46, 0(R10)(AX*1) ORL $1, BX ADDL R15, BX - TESTL R12, R12 - JE LBB2_106 + TESTL R13, R13 + JE LBB2_93 -LBB2_82: +LBB2_94: MOVLQSX BX, BX - LEAL -1(R12), R8 + LEAL -1(R13), R8 XORL AX, AX CMPL R8, $127 MOVL -44(BP), R9 - JB LBB2_95 - INCQ R8 + JB LBB2_103 + ADDQ $1, R8 MOVQ R8, AX ANDQ $-128, AX - LEAQ -128(AX), DX - MOVQ DX, SI + LEAQ -128(AX), CX + MOVQ CX, SI SHRQ $7, SI - INCQ SI - MOVL SI, CX - ANDL $3, CX - CMPQ DX, $384 - JAE LBB2_89 - XORL SI, SI - JMP LBB2_91 + ADDQ $1, SI + MOVL SI, DX + ANDL $3, DX + CMPQ CX, $384 + JAE LBB2_97 + XORL DI, DI + JMP LBB2_99 -LBB2_85: +LBB2_77: ADDB $48, R14 MOVL BX, AX - INCL BX - MOVB R14, 0(R13)(AX*1) - JMP LBB2_107 + ADDL $1, BX + MOVB R14, 0(R12)(AX*1) + JMP LBB2_105 -LBB2_86: +LBB2_84: MOVL $2, BX SUBL R14, BX - LEAQ -2(BX), R8 MOVL $2, AX - CMPQ R8, $128 - JB LBB2_104 - MOVQ R8, AX - ANDQ $-128, AX - LEAQ -128(AX), DX - MOVQ DX, SI - SHRQ $7, SI - INCQ SI - MOVL SI, CX - ANDL $3, CX - CMPQ DX, $384 - JAE LBB2_97 - XORL SI, SI - JMP LBB2_99 - -LBB2_89: - LEAQ 480(BX)(R10*1), DI - MOVQ CX, DX - SUBQ SI, DX - XORL SI, SI - QUAD $0xfffff66d056ffec5 // vmovdqu $-2451(%rip), %ymm0 /* LCPI2_0(%rip) */ - -LBB2_90: - QUAD $0xfffe2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rdi,%rsi) - QUAD $0xfffe4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rdi,%rsi) - QUAD $0xfffe6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rdi,%rsi) - QUAD $0xfffe8037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rdi,%rsi) - QUAD $0xfffea037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rdi,%rsi) - QUAD $0xfffec037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rdi,%rsi) - QUAD $0xfffee037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rdi,%rsi) - QUAD $0xffff0037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rdi,%rsi) - QUAD $0xffff2037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rdi,%rsi) - QUAD $0xffff4037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rdi,%rsi) - QUAD $0xffff6037847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rdi,%rsi) - LONG $0x447ffec5; WORD $0x8037 // vmovdqu %ymm0, $-128(%rdi,%rsi) - LONG $0x447ffec5; WORD $0xa037 // vmovdqu %ymm0, $-96(%rdi,%rsi) - LONG $0x447ffec5; WORD $0xc037 // vmovdqu %ymm0, $-64(%rdi,%rsi) - LONG $0x447ffec5; WORD $0xe037 // vmovdqu %ymm0, $-32(%rdi,%rsi) - LONG 
$0x047ffec5; BYTE $0x37 // vmovdqu %ymm0, (%rdi,%rsi) - ADDQ $512, SI - ADDQ $4, DX - JNE LBB2_90 - -LBB2_91: - TESTQ CX, CX - JE LBB2_94 - ADDQ BX, SI - LEAQ 96(R10)(SI*1), DX - NEGQ CX - QUAD $0xfffff5c4056ffec5 // vmovdqu $-2620(%rip), %ymm0 /* LCPI2_0(%rip) */ - -LBB2_93: - LONG $0x427ffec5; BYTE $0xa0 // vmovdqu %ymm0, $-96(%rdx) - LONG $0x427ffec5; BYTE $0xc0 // vmovdqu %ymm0, $-64(%rdx) - LONG $0x427ffec5; BYTE $0xe0 // vmovdqu %ymm0, $-32(%rdx) - LONG $0x027ffec5 // vmovdqu %ymm0, (%rdx) - SUBQ $-128, DX - INCQ CX - JNE LBB2_93 - -LBB2_94: - ADDQ AX, BX - CMPQ R8, AX - JE LBB2_107 - -LBB2_95: - SUBL AX, R12 -LBB2_96: - MOVB $48, 0(R10)(BX*1) - INCQ BX - DECL R12 - JNE LBB2_96 - JMP LBB2_107 +LBB2_85: + MOVB $48, 0(R12)(AX*1) + ADDQ $1, AX + CMPQ BX, AX + JNE LBB2_85 + ADDQ BX, R12 + MOVQ R12, SI + MOVL R15, DX + LONG $0x0055f0e8; BYTE $0x00 // callq _print_mantissa + ADDL BX, R15 + MOVL R15, BX + MOVL -44(BP), R9 + JMP LBB2_105 LBB2_97: - MOVQ CX, DX - SUBQ SI, DX - XORL SI, SI - QUAD $0xfffff577056ffec5 // vmovdqu $-2697(%rip), %ymm0 /* LCPI2_0(%rip) */ + LEAQ 0(BX)(R10*1), CX + ADDQ $480, CX + ANDQ $-4, SI + NEGQ SI + XORL DI, DI + QUAD $0xfffff6c8056ffec5 // vmovdqu $-2360(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_98: - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x02 // vmovdqu %ymm0, $2(%r13,%rsi) - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x22 // vmovdqu %ymm0, $34(%r13,%rsi) - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x42 // vmovdqu %ymm0, $66(%r13,%rsi) - LONG $0x7f7ec1c4; WORD $0x3544; BYTE $0x62 // vmovdqu %ymm0, $98(%r13,%rsi) - QUAD $0x008235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $130(%r13,%rsi) - QUAD $0x00a235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $162(%r13,%rsi) - QUAD $0x00c235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $194(%r13,%rsi) - QUAD $0x00e235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $226(%r13,%rsi) - QUAD $0x010235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $258(%r13,%rsi) - QUAD $0x012235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $290(%r13,%rsi) - QUAD $0x014235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $322(%r13,%rsi) - QUAD $0x016235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $354(%r13,%rsi) - QUAD $0x018235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $386(%r13,%rsi) - QUAD $0x01a235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $418(%r13,%rsi) - QUAD $0x01c235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $450(%r13,%rsi) - QUAD $0x01e235847f7ec1c4; WORD $0x0000 // vmovdqu %ymm0, $482(%r13,%rsi) - ADDQ $512, SI - ADDQ $4, DX + QUAD $0xfffe2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-480(%rcx,%rdi) + QUAD $0xfffe4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-448(%rcx,%rdi) + QUAD $0xfffe6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-416(%rcx,%rdi) + QUAD $0xfffe8039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-384(%rcx,%rdi) + QUAD $0xfffea039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-352(%rcx,%rdi) + QUAD $0xfffec039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-320(%rcx,%rdi) + QUAD $0xfffee039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-288(%rcx,%rdi) + QUAD $0xffff0039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-256(%rcx,%rdi) + QUAD $0xffff2039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-224(%rcx,%rdi) + QUAD $0xffff4039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-192(%rcx,%rdi) + QUAD $0xffff6039847ffec5; BYTE $0xff // vmovdqu %ymm0, $-160(%rcx,%rdi) + LONG $0x447ffec5; WORD $0x8039 // vmovdqu %ymm0, $-128(%rcx,%rdi) + LONG $0x447ffec5; WORD $0xa039 // vmovdqu %ymm0, $-96(%rcx,%rdi) + LONG $0x447ffec5; WORD $0xc039 // vmovdqu %ymm0, $-64(%rcx,%rdi) + LONG $0x447ffec5; WORD $0xe039 // vmovdqu %ymm0, 
$-32(%rcx,%rdi) + LONG $0x047ffec5; BYTE $0x39 // vmovdqu %ymm0, (%rcx,%rdi) + ADDQ $512, DI + ADDQ $4, SI JNE LBB2_98 LBB2_99: - TESTQ CX, CX + TESTQ DX, DX JE LBB2_102 - NEGQ CX - QUAD $0xfffff4c2056ffec5 // vmovdqu $-2878(%rip), %ymm0 /* LCPI2_0(%rip) */ + ADDQ BX, DI + LEAQ 0(R10)(DI*1), CX + ADDQ $96, CX + SHLQ $7, DX + XORL SI, SI + QUAD $0xfffff619056ffec5 // vmovdqu $-2535(%rip), %ymm0 /* LCPI2_0(%rip) */ LBB2_101: - MOVQ SI, DX - ORQ $2, DX - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x00 // vmovdqu %ymm0, (%r13,%rdx) - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x20 // vmovdqu %ymm0, $32(%r13,%rdx) - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x40 // vmovdqu %ymm0, $64(%r13,%rdx) - LONG $0x7f7ec1c4; WORD $0x1544; BYTE $0x60 // vmovdqu %ymm0, $96(%r13,%rdx) + LONG $0x447ffec5; WORD $0xa031 // vmovdqu %ymm0, $-96(%rcx,%rsi) + LONG $0x447ffec5; WORD $0xc031 // vmovdqu %ymm0, $-64(%rcx,%rsi) + LONG $0x447ffec5; WORD $0xe031 // vmovdqu %ymm0, $-32(%rcx,%rsi) + LONG $0x047ffec5; BYTE $0x31 // vmovdqu %ymm0, (%rcx,%rsi) SUBQ $-128, SI - INCQ CX + CMPQ DX, SI JNE LBB2_101 LBB2_102: + ADDQ AX, BX CMPQ R8, AX JE LBB2_105 - ORQ $2, AX + +LBB2_103: + SUBL AX, R13 LBB2_104: - MOVB $48, 0(R13)(AX*1) - INCQ AX - CMPQ BX, AX + MOVB $48, 0(R10)(BX*1) + ADDQ $1, BX + ADDL $-1, R13 JNE LBB2_104 LBB2_105: - ADDQ BX, R13 - MOVQ R13, SI - MOVL R15, DX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x00549fe8; BYTE $0x00 // callq _print_mantissa - ADDL BX, R15 - MOVL R15, BX - -LBB2_106: - MOVL -44(BP), R9 - -LBB2_107: ADDL R9, BX - JMP LBB2_110 + JMP LBB2_106 -LBB2_108: +LBB2_1: MOVQ $9223372036854775807, CX ANDQ CX, AX - MOVB $45, 0(R13) - INCQ R13 + MOVB $45, 0(R12) + ADDQ $1, R12 MOVL $1, R9 TESTQ AX, AX - JNE LBB2_2 + JNE LBB2_4 -LBB2_109: - MOVB $48, 0(R13) - INCL R9 +LBB2_3: + MOVB $48, 0(R12) + ADDL $1, R9 MOVL R9, BX -LBB2_110: +LBB2_106: MOVL BX, AX - ADDQ $40, SP + ADDQ $56, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -1184,10 +1081,10 @@ LBB3_1: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp MOVB $45, 0(DI) - INCQ DI + ADDQ $1, DI NEGQ SI - LONG $0x000044e8; BYTE $0x00 // callq _u64toa - INCL AX + LONG $0x000045e8; BYTE $0x00 // callq _u64toa + ADDL $1, AX BYTE $0x5d // popq %rbp RET @@ -1231,7 +1128,7 @@ _u64toa: ADDQ AX, AX CMPL SI, $1000 JB LBB4_3 - LONG $0x740d8d48; WORD $0x00ad; BYTE $0x00 // leaq $44404(%rip), %rcx /* _Digits(%rip) */ + LONG $0x0c0d8d48; WORD $0x00ae; BYTE $0x00 // leaq $44556(%rip), %rcx /* _Digits(%rip) */ MOVB 0(DX)(CX*1), CX MOVB CX, 0(DI) MOVL $1, CX @@ -1245,26 +1142,26 @@ LBB4_3: LBB4_4: MOVWLZX DX, DX ORQ $1, DX - LONG $0x53358d48; WORD $0x00ad; BYTE $0x00 // leaq $44371(%rip), %rsi /* _Digits(%rip) */ + LONG $0xeb358d48; WORD $0x00ad; BYTE $0x00 // leaq $44523(%rip), %rsi /* _Digits(%rip) */ MOVB 0(DX)(SI*1), DX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB DX, 0(DI)(SI*1) LBB4_6: - LONG $0x42158d48; WORD $0x00ad; BYTE $0x00 // leaq $44354(%rip), %rdx /* _Digits(%rip) */ + LONG $0xd9158d48; WORD $0x00ad; BYTE $0x00 // leaq $44505(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), DX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB DX, 0(DI)(SI*1) LBB4_7: MOVWLZX AX, AX ORQ $1, AX - LONG $0x2a158d48; WORD $0x00ad; BYTE $0x00 // leaq $44330(%rip), %rdx /* _Digits(%rip) */ + LONG $0xc0158d48; WORD $0x00ad; BYTE $0x00 // leaq $44480(%rip), %rdx /* _Digits(%rip) */ MOVB 0(AX)(DX*1), AX MOVL CX, DX - INCL CX + ADDL $1, CX MOVB AX, 0(DI)(DX*1) MOVL CX, AX BYTE $0x5d // popq %rbp @@ -1308,7 +1205,7 @@ LBB4_8: ADDQ R11, R11 CMPL SI, 
$10000000 JB LBB4_11 - LONG $0x93058d48; WORD $0x00ac; BYTE $0x00 // leaq $44179(%rip), %rax /* _Digits(%rip) */ + LONG $0x28058d48; WORD $0x00ad; BYTE $0x00 // leaq $44328(%rip), %rax /* _Digits(%rip) */ MOVB 0(R10)(AX*1), AX MOVB AX, 0(DI) MOVL $1, CX @@ -1322,39 +1219,39 @@ LBB4_11: LBB4_12: MOVL R10, AX ORQ $1, AX - LONG $0x6e358d48; WORD $0x00ac; BYTE $0x00 // leaq $44142(%rip), %rsi /* _Digits(%rip) */ + LONG $0x03358d48; WORD $0x00ad; BYTE $0x00 // leaq $44291(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB AX, 0(DI)(SI*1) LBB4_14: - LONG $0x5d058d48; WORD $0x00ac; BYTE $0x00 // leaq $44125(%rip), %rax /* _Digits(%rip) */ + LONG $0xf1058d48; WORD $0x00ac; BYTE $0x00 // leaq $44273(%rip), %rax /* _Digits(%rip) */ MOVB 0(R9)(AX*1), AX MOVL CX, SI - INCL CX + ADDL $1, CX MOVB AX, 0(DI)(SI*1) LBB4_15: MOVWLZX R9, AX ORQ $1, AX - LONG $0x43358d48; WORD $0x00ac; BYTE $0x00 // leaq $44099(%rip), %rsi /* _Digits(%rip) */ + LONG $0xd6358d48; WORD $0x00ac; BYTE $0x00 // leaq $44246(%rip), %rsi /* _Digits(%rip) */ MOVB 0(AX)(SI*1), AX MOVL CX, DX - MOVB AX, 0(DX)(DI*1) + MOVB AX, 0(DI)(DX*1) MOVB 0(R8)(SI*1), AX - MOVB AX, 1(DX)(DI*1) + MOVB AX, 1(DI)(DX*1) MOVWLZX R8, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX - MOVB AX, 2(DX)(DI*1) + MOVB AX, 2(DI)(DX*1) MOVB 0(R11)(SI*1), AX - MOVB AX, 3(DX)(DI*1) + MOVB AX, 3(DI)(DX*1) MOVWLZX R11, AX ORQ $1, AX MOVB 0(AX)(SI*1), AX ADDL $5, CX - MOVB AX, 4(DX)(DI*1) + MOVB AX, 4(DI)(DX*1) MOVL CX, AX BYTE $0x5d // popq %rbp RET @@ -1376,7 +1273,7 @@ LBB4_16: LONG $0xe100c269; WORD $0x05f5 // imull $100000000, %edx, %eax SUBL AX, SI LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffdcb0d6ffac5 // vmovdqu $-565(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffdc60d6ffac5 // vmovdqu $-570(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1387,11 +1284,11 @@ LBB4_16: LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 - QUAD $0xfffdc4155979e2c4; BYTE $0xff // vpbroadcastq $-572(%rip), %xmm2 /* LCPI4_1(%rip) */ - QUAD $0xfffdc3255979e2c4; BYTE $0xff // vpbroadcastq $-573(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffdbf155979e2c4; BYTE $0xff // vpbroadcastq $-577(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffdbe255979e2c4; BYTE $0xff // vpbroadcastq $-578(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffd8b2d6ffac5 // vmovdqu $-629(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffd862d6ffac5 // vmovdqu $-634(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1410,17 +1307,17 @@ LBB4_16: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffd450dfcf9c5 // vpaddb $-699(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ + QUAD $0xfffffd400dfcf9c5 // vpaddb $-704(%rip), %xmm0, %xmm1 /* LCPI4_4(%rip) */ LONG $0xd2efe9c5 // vpxor %xmm2, %xmm2, %xmm2 LONG $0xc274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + NOTL AX ORL $32768, AX - XORL $-32769, AX BSFL AX, AX MOVL $16, CX SUBL AX, CX SHLQ $4, AX - LONG $0xb6158d48; WORD $0x00ab; BYTE 
$0x00 // leaq $43958(%rip), %rdx /* _VecShiftShuffles(%rip) */ + LONG $0x4c158d48; WORD $0x00ac; BYTE $0x00 // leaq $44108(%rip), %rdx /* _VecShiftShuffles(%rip) */ LONG $0x0071e2c4; WORD $0x1004 // vpshufb (%rax,%rdx), %xmm1, %xmm0 LONG $0x077ffac5 // vmovdqu %xmm0, (%rdi) MOVL CX, AX @@ -1443,16 +1340,14 @@ LBB4_18: JMP LBB4_25 LBB4_20: - CMPL DX, $99 - JA LBB4_22 - MOVL DX, AX - LONG $0x990d8d48; WORD $0x00aa; BYTE $0x00 // leaq $43673(%rip), %rcx /* _Digits(%rip) */ - MOVB 0(CX)(AX*2), DX - MOVB 1(CX)(AX*2), AX - MOVB DX, 0(DI) - MOVB AX, 1(DI) - MOVL $2, CX - JMP LBB4_25 + CMPL DX, $99 + JA LBB4_22 + MOVL DX, AX + LONG $0x2f0d8d48; WORD $0x00ab; BYTE $0x00 // leaq $43823(%rip), %rcx /* _Digits(%rip) */ + MOVWLZX 0(CX)(AX*2), AX + MOVW AX, 0(DI) + MOVL $2, CX + JMP LBB4_25 LBB4_22: MOVL DX, AX @@ -1471,11 +1366,9 @@ LBB4_22: WORD $0xc96b; BYTE $0x64 // imull $100, %ecx, %ecx SUBL CX, AX MOVWLZX AX, AX - LONG $0x480d8d48; WORD $0x00aa; BYTE $0x00 // leaq $43592(%rip), %rcx /* _Digits(%rip) */ - MOVB 0(CX)(AX*2), DX - MOVB 1(CX)(AX*2), AX - MOVB DX, 1(DI) - MOVB AX, 2(DI) + LONG $0xe60d8d48; WORD $0x00aa; BYTE $0x00 // leaq $43750(%rip), %rcx /* _Digits(%rip) */ + MOVWLZX 0(CX)(AX*2), AX + MOVW AX, 1(DI) MOVL $3, CX JMP LBB4_25 @@ -1483,18 +1376,15 @@ LBB4_24: WORD $0xc86b; BYTE $0x64 // imull $100, %eax, %ecx SUBL CX, DX MOVWLZX AX, AX - LONG $0x25058d4c; WORD $0x00aa; BYTE $0x00 // leaq $43557(%rip), %r8 /* _Digits(%rip) */ - MOVB 0(R8)(AX*2), CX - MOVB 1(R8)(AX*2), AX - MOVB CX, 0(DI) - MOVB AX, 1(DI) + LONG $0xc80d8d48; WORD $0x00aa; BYTE $0x00 // leaq $43720(%rip), %rcx /* _Digits(%rip) */ + MOVWLZX 0(CX)(AX*2), AX + MOVW AX, 0(DI) MOVWLZX DX, AX - MOVB 0(R8)(AX*2), CX + MOVB 0(CX)(AX*2), DX ADDQ AX, AX - MOVB CX, 2(DI) - ORL $1, AX + MOVB DX, 2(DI) MOVWLZX AX, AX - MOVB 0(AX)(R8*1), AX + MOVB 1(AX)(CX*1), AX MOVB AX, 3(DI) MOVL $4, CX @@ -1504,7 +1394,7 @@ LBB4_25: MULQ DX SHRQ $26, DX LONG $0xc26ef9c5 // vmovd %edx, %xmm0 - QUAD $0xfffffbe40d6ffac5 // vmovdqu $-1052(%rip), %xmm1 /* LCPI4_0(%rip) */ + QUAD $0xfffffbfa0d6ffac5 // vmovdqu $-1030(%rip), %xmm1 /* LCPI4_0(%rip) */ LONG $0xd1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm2 LONG $0xd273e9c5; BYTE $0x2d // vpsrlq $45, %xmm2, %xmm2 MOVL $10000, AX @@ -1514,12 +1404,12 @@ LBB4_25: LONG $0xc061e9c5 // vpunpcklwd %xmm0, %xmm2, %xmm0 LONG $0xf073f9c5; BYTE $0x02 // vpsllq $2, %xmm0, %xmm0 LONG $0xc070fbc5; BYTE $0x50 // vpshuflw $80, %xmm0, %xmm0 - QUAD $0xfffbe2155979e2c4; BYTE $0xff // vpbroadcastq $-1054(%rip), %xmm2 /* LCPI4_1(%rip) */ + QUAD $0xfffbf8155979e2c4; BYTE $0xff // vpbroadcastq $-1032(%rip), %xmm2 /* LCPI4_1(%rip) */ LONG $0xc070f9c5; BYTE $0x50 // vpshufd $80, %xmm0, %xmm0 LONG $0xc2e4f9c5 // vpmulhuw %xmm2, %xmm0, %xmm0 - QUAD $0xfffbd8255979e2c4; BYTE $0xff // vpbroadcastq $-1064(%rip), %xmm4 /* LCPI4_2(%rip) */ + QUAD $0xfffbee255979e2c4; BYTE $0xff // vpbroadcastq $-1042(%rip), %xmm4 /* LCPI4_2(%rip) */ LONG $0xc4e4f9c5 // vpmulhuw %xmm4, %xmm0, %xmm0 - QUAD $0xfffffba42d6ffac5 // vmovdqu $-1116(%rip), %xmm5 /* LCPI4_3(%rip) */ + QUAD $0xfffffbba2d6ffac5 // vmovdqu $-1094(%rip), %xmm5 /* LCPI4_3(%rip) */ LONG $0xf5d5f9c5 // vpmullw %xmm5, %xmm0, %xmm6 LONG $0xf673c9c5; BYTE $0x10 // vpsllq $16, %xmm6, %xmm6 LONG $0xc6f9f9c5 // vpsubw %xmm6, %xmm0, %xmm0 @@ -1540,7 +1430,7 @@ LBB4_25: LONG $0xf273e9c5; BYTE $0x10 // vpsllq $16, %xmm2, %xmm2 LONG $0xcaf9f1c5 // vpsubw %xmm2, %xmm1, %xmm1 LONG $0xc167f9c5 // vpackuswb %xmm1, %xmm0, %xmm0 - QUAD $0xfffffb5605fcf9c5 // vpaddb $-1194(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ 
+ QUAD $0xfffffb6c05fcf9c5 // vpaddb $-1172(%rip), %xmm0, %xmm0 /* LCPI4_4(%rip) */ MOVL CX, AX LONG $0x047ffac5; BYTE $0x07 // vmovdqu %xmm0, (%rdi,%rax) ORL $16, CX @@ -1577,533 +1467,814 @@ _quote: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $16, SP + SUBQ $24, SP MOVQ CX, R15 - TESTB $1, R8 - LONG $0xc5058d48; WORD $0x00a9; BYTE $0x00 // leaq $43461(%rip), %rax /* __SingleQuoteTab(%rip) */ - LONG $0xbe158d4c; WORD $0x00b9; BYTE $0x00 // leaq $47550(%rip), %r10 /* __DoubleQuoteTab(%rip) */ - LONG $0xd0440f4c // cmoveq %rax, %r10 - MOVQ DX, R8 - MOVQ DI, AX - TESTQ SI, SI - JE LBB5_84 MOVQ SI, R14 - MOVQ 0(R15), R11 - QUAD $0xffffff290d6f7ec5 // vmovdqu $-215(%rip), %ymm9 /* LCPI5_0(%rip) */ - QUAD $0xffffff41156f7ec5 // vmovdqu $-191(%rip), %ymm10 /* LCPI5_1(%rip) */ - QUAD $0xffffff591d6f7ec5 // vmovdqu $-167(%rip), %ymm11 /* LCPI5_2(%rip) */ + MOVQ 0(CX), R10 + TESTB $1, R8 + LONG $0x6d058d48; WORD $0x00aa; BYTE $0x00 // leaq $43629(%rip), %rax /* __SingleQuoteTab(%rip) */ + LONG $0x66058d4c; WORD $0x00ba; BYTE $0x00 // leaq $47718(%rip), %r8 /* __DoubleQuoteTab(%rip) */ + LONG $0xc0440f4c // cmoveq %rax, %r8 + LEAQ 0(SI*8), AX + CMPQ R10, AX + JGE LBB5_93 + MOVQ DX, R9 + MOVQ DI, R11 + TESTQ R14, R14 + JE LBB5_125 + QUAD $0xffffff18256ffec5 // vmovdqu $-232(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xffffff302d6ffec5 // vmovdqu $-208(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xffffff48356ffec5 // vmovdqu $-184(%rip), %ymm6 /* LCPI5_2(%rip) */ LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - MOVQ DI, AX - MOVQ DX, R8 - MOVQ R15, -48(BP) - MOVQ DX, -56(BP) + MOVQ DI, CX + MOVQ DX, -64(BP) + MOVQ DX, R9 -LBB5_2: - CMPQ R14, $31 - SETGT CX - MOVQ R11, R9 - MOVQ R8, R13 - MOVQ R14, SI - MOVQ AX, R12 - CMPQ R11, $32 - JL LBB5_8 +LBB5_3: + MOVQ CX, R13 CMPQ R14, $32 - JL LBB5_8 - MOVQ AX, R12 + SETGE CX + MOVQ R13, AX + MOVQ R10, BX + MOVQ R9, R12 + MOVQ R14, R11 + JL LBB5_10 + CMPQ R10, $32 + JL LBB5_10 + XORL R12, R12 MOVQ R14, SI - MOVQ R8, R13 - MOVQ R11, DX + MOVQ R10, DX -LBB5_5: - LONG $0x6f7ec1c4; WORD $0x2404 // vmovdqu (%r12), %ymm0 - LONG $0xc864b5c5 // vpcmpgtb %ymm0, %ymm9, %ymm1 - LONG $0xd074adc5 // vpcmpeqb %ymm0, %ymm10, %ymm2 - LONG $0xd874a5c5 // vpcmpeqb %ymm0, %ymm11, %ymm3 +LBB5_6: + LONG $0x6f7ea1c4; WORD $0x2004 // vmovdqu (%rax,%r12), %ymm0 + LONG $0xc864ddc5 // vpcmpgtb %ymm0, %ymm4, %ymm1 + LONG $0xd574fdc5 // vpcmpeqb %ymm5, %ymm0, %ymm2 + LONG $0xde74fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm3 LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 - LONG $0x7f7ec1c4; WORD $0x0045 // vmovdqu %ymm0, (%r13) + LONG $0x7f7e81c4; WORD $0x2104 // vmovdqu %ymm0, (%r9,%r12) LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 - LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 + LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 LONG $0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx TESTL CX, CX - JNE LBB5_22 + JNE LBB5_20 + LEAQ -32(SI), R11 + LEAQ -32(DX), BX ADDQ $32, R12 - ADDQ $32, R13 - LEAQ -32(DX), R9 - CMPQ SI, $63 - SETGT CX CMPQ SI, $64 - LEAQ -32(SI), SI - JL LBB5_8 + SETGE CX + JL LBB5_9 + MOVQ R11, SI CMPQ DX, $63 - MOVQ R9, DX - JG LBB5_5 + MOVQ BX, DX + JG LBB5_6 -LBB5_8: +LBB5_9: + LEAQ 0(AX)(R12*1), R13 + ADDQ R9, R12 + +LBB5_10: TESTB CX, CX - JE LBB5_12 - LONG $0x6f7ec1c4; WORD $0x2404 // vmovdqu (%r12), %ymm0 - LONG $0xc864b5c5 // vpcmpgtb %ymm0, %ymm9, %ymm1 - LONG $0xd074adc5 // vpcmpeqb %ymm0, %ymm10, %ymm2 - LONG $0xd874a5c5 // vpcmpeqb %ymm0, %ymm11, %ymm3 + JE LBB5_14 + MOVQ 
R14, SI + LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 + LONG $0xc864ddc5 // vpcmpgtb %ymm0, %ymm4, %ymm1 + LONG $0xd574fdc5 // vpcmpeqb %ymm5, %ymm0, %ymm2 + LONG $0xde74fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm3 LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 - LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 + LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 LONG $0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx MOVQ $4294967296, DX ORQ DX, CX - BSFQ CX, R15 - LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 + BSFQ CX, R14 + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0x16f9e3c4; WORD $0x01c1 // vpextrq $1, %xmm0, %rcx LONG $0x7ef9e1c4; BYTE $0xc2 // vmovq %xmm0, %rdx - CMPQ R15, R9 - JLE LBB5_23 - CMPQ R9, $16 - JB LBB5_33 - MOVQ DX, 0(R13) - MOVQ CX, 8(R13) - LEAQ 16(R12), DX - ADDQ $16, R13 - LEAQ -16(R9), SI - MOVQ -48(BP), R15 - CMPQ SI, $8 - JAE LBB5_34 - JMP LBB5_35 + CMPQ R14, BX + JLE LBB5_21 + CMPQ BX, $16 + MOVQ AX, R11 + JB LBB5_24 + MOVQ DX, 0(R12) + MOVQ CX, 8(R12) + LEAQ 16(R13), CX + ADDQ $16, R12 + LEAQ -16(BX), DX + MOVQ SI, R14 + CMPQ DX, $8 + JAE LBB5_25 + JMP LBB5_26 -LBB5_12: +LBB5_14: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ SI, $15 - SETGT R15 - CMPQ R9, $16 - JL LBB5_25 - CMPQ SI, $16 - JL LBB5_25 - QUAD $0xfffffdcd0d6f7ec5 // vmovdqu $-563(%rip), %ymm9 /* LCPI5_0(%rip) */ - QUAD $0xfffffde5156f7ec5 // vmovdqu $-539(%rip), %ymm10 /* LCPI5_1(%rip) */ - QUAD $0xfffffdfd1d6f7ec5 // vmovdqu $-515(%rip), %ymm11 /* LCPI5_2(%rip) */ + CMPQ R11, $16 + SETGE DX + MOVQ R15, -56(BP) + MOVQ R14, -48(BP) + JL LBB5_31 + CMPQ BX, $16 + QUAD $0xfffffe203d6ffac5 // vmovdqu $-480(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffffe280d6f7ac5 // vmovdqu $-472(%rip), %xmm9 /* LCPI5_4(%rip) */ + QUAD $0xfffffe30156f7ac5 // vmovdqu $-464(%rip), %xmm10 /* LCPI5_5(%rip) */ + LONG $0x762141c4; BYTE $0xdb // vpcmpeqd %xmm11, %xmm11, %xmm11 + JL LBB5_36 + MOVQ AX, CX + SUBQ R13, CX + QUAD $0xfffffd97256ffec5 // vmovdqu $-617(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xfffffdaf2d6ffec5 // vmovdqu $-593(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xfffffdc7356ffec5 // vmovdqu $-569(%rip), %ymm6 /* LCPI5_2(%rip) */ LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - QUAD $0xfffffe103d6ffac5 // vmovdqu $-496(%rip), %xmm7 /* LCPI5_3(%rip) */ - QUAD $0xfffffe18256ffac5 // vmovdqu $-488(%rip), %xmm4 /* LCPI5_4(%rip) */ - QUAD $0xfffffe202d6ffac5 // vmovdqu $-480(%rip), %xmm5 /* LCPI5_5(%rip) */ - LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 -LBB5_15: - LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 +LBB5_17: + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 - LONG $0xd474f9c5 // vpcmpeqb %xmm4, %xmm0, %xmm2 - LONG $0xdd74f9c5 // vpcmpeqb %xmm5, %xmm0, %xmm3 + LONG $0xd074b1c5 // vpcmpeqb %xmm0, %xmm9, %xmm2 + LONG $0xd874a9c5 // vpcmpeqb %xmm0, %xmm10, %xmm3 LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 - LONG $0x7f7ac1c4; WORD $0x0045 // vmovdqu %xmm0, (%r13) - LONG $0xc664f9c5 // vpcmpgtb %xmm6, %xmm0, %xmm0 - LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 + LONG $0x7f7ac1c4; WORD $0x2404 // vmovdqu %xmm0, (%r12) + LONG $0x6479c1c4; BYTE $0xc3 // vpcmpgtb %xmm11, %xmm0, %xmm0 + LONG $0xc0dbf1c5 // vpand %xmm0, %xmm1, %xmm0 LONG $0xc0ebe9c5 // vpor %xmm0, %xmm2, %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - TESTW CX, CX - JNE LBB5_32 - ADDQ $16, R12 + LONG $0xd0d7f9c5 // vpmovmskb %xmm0, %edx + TESTL 
DX, DX + JNE LBB5_23 ADDQ $16, R13 - LEAQ -16(R9), BX - CMPQ SI, $31 - SETGT R15 - CMPQ SI, $32 - LEAQ -16(SI), SI - JL LBB5_18 - CMPQ R9, $31 - MOVQ BX, R9 - JG LBB5_15 + ADDQ $16, R12 + LEAQ -16(R11), R15 + LEAQ -16(BX), R14 + CMPQ R11, $32 + SETGE DX + JL LBB5_32 + ADDQ $-16, CX + MOVQ R15, R11 + CMPQ BX, $31 + MOVQ R14, BX + JG LBB5_17 -LBB5_18: - TESTB R15, R15 - JE LBB5_26 +LBB5_32: + TESTB DX, DX + JE LBB5_37 -LBB5_19: - LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 +LBB5_33: + LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 - LONG $0xd474f9c5 // vpcmpeqb %xmm4, %xmm0, %xmm2 - LONG $0xdd74f9c5 // vpcmpeqb %xmm5, %xmm0, %xmm3 + LONG $0xd074b1c5 // vpcmpeqb %xmm0, %xmm9, %xmm2 + LONG $0xd874a9c5 // vpcmpeqb %xmm0, %xmm10, %xmm3 LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 - LONG $0xde64f9c5 // vpcmpgtb %xmm6, %xmm0, %xmm3 - LONG $0xc9dbe1c5 // vpand %xmm1, %xmm3, %xmm1 + LONG $0x6479c1c4; BYTE $0xdb // vpcmpgtb %xmm11, %xmm0, %xmm3 + LONG $0xcbdbf1c5 // vpand %xmm3, %xmm1, %xmm1 LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 LONG $0xc9d7f9c5 // vpmovmskb %xmm1, %ecx ORL $65536, CX - BSFL CX, DX + BSFL CX, BX LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - CMPQ BX, DX - JGE LBB5_51 - CMPQ BX, $8 - JB LBB5_54 - MOVQ CX, 0(R13) - LEAQ 8(R12), SI - ADDQ $8, R13 - LEAQ -8(BX), DX - MOVQ -48(BP), R15 - CMPQ DX, $4 - JAE LBB5_80 - -LBB5_56: - CMPQ DX, $2 - JB LBB5_57 + CMPQ R14, BX + MOVQ AX, R11 + JGE LBB5_57 + CMPQ R14, $8 + MOVQ -56(BP), R15 + JB LBB5_62 + MOVQ CX, 0(R12) + LEAQ 8(R13), CX + ADDQ $8, R12 + LEAQ -8(R14), DX + JMP LBB5_63 -LBB5_81: - MOVWLZX 0(SI), CX - MOVW CX, 0(R13) - ADDQ $2, SI - ADDQ $2, R13 - ADDQ $-2, DX - TESTQ DX, DX - JNE LBB5_58 - JMP LBB5_59 +LBB5_20: + MOVQ AX, R11 + BSFL CX, R13 + ADDQ R12, R13 + JMP LBB5_75 -LBB5_22: - SUBQ AX, R12 - BSFL CX, R9 - ADDQ R12, R9 - JMP LBB5_67 +LBB5_21: + CMPL R14, $16 + MOVQ AX, R11 + JB LBB5_46 + MOVQ DX, 0(R12) + MOVQ CX, 8(R12) + LEAQ 16(R13), BX + ADDQ $16, R12 + LEAQ -16(R14), CX + CMPQ CX, $8 + JAE LBB5_47 + JMP LBB5_48 LBB5_23: - CMPL R15, $16 - JB LBB5_40 - MOVQ DX, 0(R13) - MOVQ CX, 8(R13) - LEAQ 16(R12), BX - ADDQ $16, R13 - LEAQ -16(R15), SI - CMPQ SI, $8 - JAE LBB5_41 - JMP LBB5_42 + BSFW DX, DX + MOVWLZX DX, R13 + SUBQ CX, R13 + MOVQ -56(BP), R15 + MOVQ -48(BP), R14 + MOVQ AX, R11 + JMP LBB5_75 + +LBB5_24: + MOVQ R13, CX + MOVQ BX, DX + MOVQ SI, R14 + CMPQ DX, $8 + JB LBB5_26 LBB5_25: - MOVQ R9, BX - QUAD $0xfffffc5d0d6f7ec5 // vmovdqu $-931(%rip), %ymm9 /* LCPI5_0(%rip) */ - QUAD $0xfffffc75156f7ec5 // vmovdqu $-907(%rip), %ymm10 /* LCPI5_1(%rip) */ - QUAD $0xfffffc8d1d6f7ec5 // vmovdqu $-883(%rip), %ymm11 /* LCPI5_2(%rip) */ - LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 - QUAD $0xfffffca03d6ffac5 // vmovdqu $-864(%rip), %xmm7 /* LCPI5_3(%rip) */ - QUAD $0xfffffca8256ffac5 // vmovdqu $-856(%rip), %xmm4 /* LCPI5_4(%rip) */ - QUAD $0xfffffcb02d6ffac5 // vmovdqu $-848(%rip), %xmm5 /* LCPI5_5(%rip) */ - LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 - TESTB R15, R15 - JNE LBB5_19 + MOVQ 0(CX), SI + MOVQ SI, 0(R12) + ADDQ $8, CX + ADDQ $8, R12 + ADDQ $-8, DX LBB5_26: - TESTQ BX, BX - MOVQ -48(BP), R15 - LONG $0x890d8d4c; WORD $0x00a6; BYTE $0x00 // leaq $42633(%rip), %r9 /* __SingleQuoteTab(%rip) */ - JLE LBB5_31 - TESTQ SI, SI - JLE LBB5_31 + CMPQ DX, $4 + JB LBB5_27 + MOVL 0(CX), SI + MOVL SI, 0(R12) + ADDQ $4, CX + ADDQ $4, R12 + ADDQ $-4, DX + CMPQ DX, $2 + JAE LBB5_54 LBB5_28: - MOVBLZX 0(R12), CX - MOVQ CX, DX - SHLQ $4, DX - 
CMPQ 0(DX)(R9*1), $0 - JNE LBB5_53 - INCQ R12 - MOVB CX, 0(R13) - CMPQ SI, $2 - LEAQ -1(SI), SI - JL LBB5_31 - INCQ R13 - CMPQ BX, $1 - LEAQ -1(BX), BX - JG LBB5_28 + TESTQ DX, DX + JE LBB5_30 -LBB5_31: - SUBQ AX, R12 - NEGQ SI - SBBQ R9, R9 - XORQ R12, R9 - JMP LBB5_67 +LBB5_29: + MOVB 0(CX), CX + MOVB CX, 0(R12) -LBB5_32: - MOVWLZX CX, CX - SUBQ AX, R12 - BSFL CX, R9 - ADDQ R12, R9 - MOVQ -48(BP), R15 - JMP LBB5_67 +LBB5_30: + ADDQ R13, BX + NOTQ BX + ADDQ R11, BX + MOVQ BX, R13 + JMP LBB5_75 -LBB5_33: - MOVQ R12, DX - MOVQ R9, SI - MOVQ -48(BP), R15 - CMPQ SI, $8 - JB LBB5_35 +LBB5_27: + CMPQ DX, $2 + JB LBB5_28 -LBB5_34: - MOVQ 0(DX), CX - MOVQ CX, 0(R13) - ADDQ $8, DX - ADDQ $8, R13 - ADDQ $-8, SI +LBB5_54: + MOVWLZX 0(CX), SI + MOVW SI, 0(R12) + ADDQ $2, CX + ADDQ $2, R12 + ADDQ $-2, DX + TESTQ DX, DX + JNE LBB5_29 + JMP LBB5_30 -LBB5_35: - CMPQ SI, $4 - JB LBB5_36 - MOVL 0(DX), CX - MOVL CX, 0(R13) - ADDQ $4, DX - ADDQ $4, R13 - ADDQ $-4, SI - CMPQ SI, $2 - JAE LBB5_48 +LBB5_31: + MOVQ BX, R14 + MOVQ R11, R15 + QUAD $0xfffffbd2256ffec5 // vmovdqu $-1070(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xfffffbea2d6ffec5 // vmovdqu $-1046(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xfffffc02356ffec5 // vmovdqu $-1022(%rip), %ymm6 /* LCPI5_2(%rip) */ + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + QUAD $0xfffffc153d6ffac5 // vmovdqu $-1003(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffffc1d0d6f7ac5 // vmovdqu $-995(%rip), %xmm9 /* LCPI5_4(%rip) */ + QUAD $0xfffffc25156f7ac5 // vmovdqu $-987(%rip), %xmm10 /* LCPI5_5(%rip) */ + LONG $0x762141c4; BYTE $0xdb // vpcmpeqd %xmm11, %xmm11, %xmm11 + TESTB DX, DX + JE LBB5_37 + JMP LBB5_33 -LBB5_37: - TESTQ SI, SI - JE LBB5_39 +LBB5_36: + MOVQ BX, R14 + MOVQ R11, R15 + QUAD $0xfffffb89256ffec5 // vmovdqu $-1143(%rip), %ymm4 /* LCPI5_0(%rip) */ + QUAD $0xfffffba12d6ffec5 // vmovdqu $-1119(%rip), %ymm5 /* LCPI5_1(%rip) */ + QUAD $0xfffffbb9356ffec5 // vmovdqu $-1095(%rip), %ymm6 /* LCPI5_2(%rip) */ + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + TESTB DX, DX + JNE LBB5_33 -LBB5_38: - MOVB 0(DX), CX - MOVB CX, 0(R13) +LBB5_37: + TESTQ R15, R15 + MOVQ AX, R11 + JLE LBB5_44 + TESTQ R14, R14 + JLE LBB5_44 + XORL DX, DX + XORL CX, CX -LBB5_39: - SUBQ AX, R9 - ADDQ R12, R9 - NOTQ R9 - JMP LBB5_67 +LBB5_40: + MOVBLZX 0(R13)(DX*1), BX + MOVQ BX, SI + SHLQ $4, SI + LONG $0x69058d48; WORD $0x00a6; BYTE $0x00 // leaq $42601(%rip), %rax /* __SingleQuoteTab(%rip) */ + CMPQ 0(SI)(AX*1), $0 + JNE LBB5_61 + LEAQ 0(R15)(CX*1), AX + MOVB BX, 0(R12)(DX*1) + LEAQ -1(CX), SI + CMPQ AX, $2 + JL LBB5_43 + ADDQ R14, CX + ADDQ $1, DX + CMPQ CX, $1 + MOVQ SI, CX + JG LBB5_40 -LBB5_36: - CMPQ SI, $2 - JB LBB5_37 +LBB5_43: + SUBQ SI, R13 + ADDQ SI, R15 -LBB5_48: - MOVWLZX 0(DX), CX - MOVW CX, 0(R13) - ADDQ $2, DX - ADDQ $2, R13 - ADDQ $-2, SI - TESTQ SI, SI - JNE LBB5_38 - JMP LBB5_39 +LBB5_44: + TESTQ R15, R15 + MOVQ -48(BP), R14 + MOVQ $12884901889, R12 + JE LBB5_59 + NOTQ R13 + ADDQ R11, R13 + JMP LBB5_60 -LBB5_40: - MOVQ R12, BX - MOVQ R15, SI - CMPQ SI, $8 - JB LBB5_42 +LBB5_46: + MOVQ R13, BX + MOVQ R14, CX + CMPQ CX, $8 + JB LBB5_48 -LBB5_41: - MOVQ 0(BX), CX - MOVQ CX, 0(R13) +LBB5_47: + MOVQ 0(BX), DX + MOVQ DX, 0(R12) ADDQ $8, BX - ADDQ $8, R13 - ADDQ $-8, SI + ADDQ $8, R12 + ADDQ $-8, CX -LBB5_42: - CMPQ SI, $4 - JB LBB5_43 - MOVL 0(BX), CX - MOVL CX, 0(R13) +LBB5_48: + CMPQ CX, $4 + JB LBB5_49 + MOVL 0(BX), DX + MOVL DX, 0(R12) ADDQ $4, BX - ADDQ $4, R13 - ADDQ $-4, SI - CMPQ SI, $2 - JAE LBB5_50 + ADDQ $4, R12 + ADDQ $-4, CX + CMPQ CX, 
$2 + JAE LBB5_56 -LBB5_44: - TESTQ SI, SI - JE LBB5_46 +LBB5_50: + TESTQ CX, CX + JE LBB5_52 -LBB5_45: +LBB5_51: MOVB 0(BX), CX - MOVB CX, 0(R13) + MOVB CX, 0(R12) -LBB5_46: - SUBQ AX, R12 - ADDQ R15, R12 - MOVQ R12, R9 - MOVQ -48(BP), R15 - JMP LBB5_67 +LBB5_52: + SUBQ R11, R13 + ADDQ R14, R13 + MOVQ SI, R14 + JMP LBB5_75 -LBB5_43: - CMPQ SI, $2 - JB LBB5_44 +LBB5_49: + CMPQ CX, $2 + JB LBB5_50 -LBB5_50: - MOVWLZX 0(BX), CX - MOVW CX, 0(R13) +LBB5_56: + MOVWLZX 0(BX), DX + MOVW DX, 0(R12) ADDQ $2, BX - ADDQ $2, R13 - ADDQ $-2, SI - TESTQ SI, SI - JNE LBB5_45 - JMP LBB5_46 + ADDQ $2, R12 + ADDQ $-2, CX + TESTQ CX, CX + JNE LBB5_51 + JMP LBB5_52 -LBB5_51: - CMPL DX, $8 - JB LBB5_60 - MOVQ CX, 0(R13) - LEAQ 8(R12), R9 - ADDQ $8, R13 - LEAQ -8(DX), SI - MOVQ -48(BP), R15 - CMPQ SI, $4 - JAE LBB5_82 +LBB5_57: + CMPL BX, $8 + MOVQ -56(BP), R15 + JB LBB5_69 + MOVQ CX, 0(R12) + LEAQ 8(R13), DX + ADDQ $8, R12 + LEAQ -8(BX), CX + JMP LBB5_70 -LBB5_62: - CMPQ SI, $2 - JB LBB5_63 - -LBB5_83: - MOVWLZX 0(R9), CX - MOVW CX, 0(R13) - ADDQ $2, R9 - ADDQ $2, R13 - ADDQ $-2, SI - TESTQ SI, SI - JNE LBB5_64 - JMP LBB5_65 +LBB5_59: + SUBQ R11, R13 -LBB5_53: - SUBQ AX, R12 - JMP LBB5_66 +LBB5_60: + MOVQ -56(BP), R15 + JMP LBB5_76 -LBB5_54: - MOVQ R12, SI - MOVQ BX, DX - MOVQ -48(BP), R15 - CMPQ DX, $4 - JB LBB5_56 +LBB5_61: + SUBQ R11, R13 + SUBQ CX, R13 + MOVQ -56(BP), R15 + JMP LBB5_68 -LBB5_80: - MOVL 0(SI), CX - MOVL CX, 0(R13) - ADDQ $4, SI - ADDQ $4, R13 +LBB5_62: + MOVQ R13, CX + MOVQ R14, DX + +LBB5_63: + CMPQ DX, $4 + JB LBB5_64 + MOVL 0(CX), SI + MOVL SI, 0(R12) + ADDQ $4, CX + ADDQ $4, R12 ADDQ $-4, DX CMPQ DX, $2 - JAE LBB5_81 + JAE LBB5_90 -LBB5_57: +LBB5_65: TESTQ DX, DX - JE LBB5_59 + JE LBB5_67 -LBB5_58: - MOVB 0(SI), CX - MOVB CX, 0(R13) +LBB5_66: + MOVB 0(CX), CX + MOVB CX, 0(R12) -LBB5_59: - SUBQ AX, BX - ADDQ R12, BX - NOTQ BX - MOVQ BX, R9 - JMP LBB5_67 +LBB5_67: + ADDQ R13, R14 + NOTQ R14 + ADDQ R11, R14 + MOVQ R14, R13 -LBB5_60: - MOVQ R12, R9 - MOVQ DX, SI - MOVQ -48(BP), R15 - CMPQ SI, $4 - JB LBB5_62 +LBB5_68: + MOVQ -48(BP), R14 + JMP LBB5_75 -LBB5_82: - MOVL 0(R9), CX - MOVL CX, 0(R13) - ADDQ $4, R9 - ADDQ $4, R13 - ADDQ $-4, SI - CMPQ SI, $2 - JAE LBB5_83 +LBB5_64: + CMPQ DX, $2 + JB LBB5_65 -LBB5_63: - TESTQ SI, SI - JE LBB5_65 +LBB5_90: + MOVWLZX 0(CX), SI + MOVW SI, 0(R12) + ADDQ $2, CX + ADDQ $2, R12 + ADDQ $-2, DX + TESTQ DX, DX + JNE LBB5_66 + JMP LBB5_67 -LBB5_64: - MOVB 0(R9), CX - MOVB CX, 0(R13) +LBB5_69: + MOVQ R13, DX + MOVQ BX, CX -LBB5_65: - SUBQ AX, R12 - ADDQ DX, R12 +LBB5_70: + MOVQ -48(BP), R14 + CMPQ CX, $4 + JB LBB5_71 + MOVL 0(DX), SI + MOVL SI, 0(R12) + ADDQ $4, DX + ADDQ $4, R12 + ADDQ $-4, CX + CMPQ CX, $2 + JAE LBB5_92 -LBB5_66: - MOVQ R12, R9 +LBB5_72: + TESTQ CX, CX + JE LBB5_74 -LBB5_67: - MOVQ -56(BP), DX - TESTQ R9, R9 - JS LBB5_87 - ADDQ R9, AX - ADDQ R9, R8 - CMPQ R14, R9 - JE LBB5_84 - SUBQ R9, R11 - SUBQ R14, R9 - JMP LBB5_71 +LBB5_73: + MOVB 0(DX), CX + MOVB CX, 0(R12) -LBB5_70: - INCQ AX - ADDQ R13, R8 - INCQ R9 - JE LBB5_84 +LBB5_74: + SUBQ R11, R13 + ADDQ BX, R13 -LBB5_71: - MOVBLZX 0(AX), SI +LBB5_75: + MOVQ $12884901889, R12 + +LBB5_76: + TESTQ R13, R13 + JS LBB5_128 + ADDQ R13, R9 + CMPQ R14, R13 + JE LBB5_124 + SUBQ R13, R10 + JMP LBB5_80 + +LBB5_79: + ADDQ CX, R9 + ADDQ $1, R13 + CMPQ R14, R13 + JE LBB5_124 + +LBB5_80: + MOVBLZX 0(R11)(R13*1), SI SHLQ $4, SI - MOVQ 0(R10)(SI*1), BX + MOVQ 0(R8)(SI*1), BX TESTL BX, BX - JE LBB5_79 - MOVLQSX BX, R13 - SUBQ R13, R11 - JL LBB5_85 + JE LBB5_88 + MOVLQSX BX, CX + SUBQ CX, R10 + JL LBB5_126 SHLQ 
$32, BX - LEAQ 8(R10)(SI*1), R12 - MOVQ $12884901889, CX - CMPQ BX, CX - JL LBB5_75 - MOVL 0(R12), CX - MOVL CX, 0(R8) - LEAQ 12(R10)(SI*1), R12 - LEAQ 4(R8), R14 - LEAQ -4(R13), CX - CMPQ CX, $2 - JGE LBB5_76 - JMP LBB5_77 + LEAQ 0(R8)(SI*1), DX + ADDQ $8, DX + CMPQ BX, R12 + JL LBB5_84 + MOVL 0(DX), AX + MOVL AX, 0(R9) + LEAQ 0(R8)(SI*1), DX + ADDQ $12, DX + LEAQ 4(R9), SI + LEAQ -4(CX), BX + CMPQ BX, $2 + JGE LBB5_85 + JMP LBB5_86 -LBB5_75: - MOVQ R8, R14 - MOVQ R13, CX +LBB5_84: + MOVQ R9, SI + MOVQ CX, BX + CMPQ BX, $2 + JL LBB5_86 + +LBB5_85: + MOVWLZX 0(DX), AX + MOVW AX, 0(SI) + ADDQ $2, DX + ADDQ $2, SI + ADDQ $-2, BX + +LBB5_86: + TESTQ BX, BX + JLE LBB5_79 + MOVBLZX 0(DX), AX + MOVB AX, 0(SI) + JMP LBB5_79 + +LBB5_88: + LEAQ 0(R11)(R13*1), CX + SUBQ R13, R14 + JNE LBB5_3 + JMP LBB5_124 + +LBB5_71: CMPQ CX, $2 - JL LBB5_77 + JB LBB5_72 -LBB5_76: - MOVWLZX 0(R12), BX - MOVW BX, 0(R14) +LBB5_92: + MOVWLZX 0(DX), SI + MOVW SI, 0(R12) + ADDQ $2, DX ADDQ $2, R12 - ADDQ $2, R14 ADDQ $-2, CX - -LBB5_77: TESTQ CX, CX - JLE LBB5_70 - MOVBLZX 0(R12), CX - MOVB CX, 0(R14) - JMP LBB5_70 + JNE LBB5_73 + JMP LBB5_74 + +LBB5_93: + LONG $0xcf0d8d4c; WORD $0x00c3; BYTE $0x00 // leaq $50127(%rip), %r9 /* __EscTab(%rip) */ + QUAD $0xfffff8a5156f7ec5 // vmovdqu $-1883(%rip), %ymm10 /* LCPI5_0(%rip) */ + QUAD $0xfffff8bd0d6f7ec5 // vmovdqu $-1859(%rip), %ymm9 /* LCPI5_1(%rip) */ + QUAD $0xfffff8d51d6f7ec5 // vmovdqu $-1835(%rip), %ymm11 /* LCPI5_2(%rip) */ + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + MOVQ DX, BX + MOVQ R14, SI -LBB5_79: - MOVQ R9, R14 - NEGQ R14 - TESTQ R9, R9 - JNE LBB5_2 +LBB5_94: + CMPQ SI, $16 + JGE LBB5_95 -LBB5_84: - SUBQ DX, R8 - MOVQ R8, 0(R15) - SUBQ DI, AX - JMP LBB5_86 +LBB5_100: + CMPQ SI, $8 + JL LBB5_104 + +LBB5_101: + MOVBLZX 0(DI), AX + MOVBLZX 1(DI), CX + MOVB 0(CX)(R9*1), CX + ADDB CX, CX + ORB 0(AX)(R9*1), CX + MOVBLZX 2(DI), AX + MOVB 0(AX)(R9*1), R10 + SHLB $2, R10 + ORB CX, R10 + MOVBLZX 3(DI), AX + MOVB 0(AX)(R9*1), AX + SHLB $3, AX + ORB R10, AX + MOVQ 0(DI), CX + MOVQ CX, 0(BX) + JNE LBB5_121 + MOVBLZX 4(DI), AX + MOVBLZX 5(DI), CX + MOVB 0(CX)(R9*1), CX + ADDB CX, CX + ORB 0(AX)(R9*1), CX + MOVBLZX 6(DI), AX + MOVB 0(AX)(R9*1), R10 + SHLB $2, R10 + ORB CX, R10 + MOVBLZX 7(DI), AX + MOVB 0(AX)(R9*1), AX + SHLB $3, AX + ORB R10, AX + JNE LBB5_122 + ADDQ $8, BX + ADDQ $8, DI + ADDQ $-8, SI + +LBB5_104: + CMPQ SI, $4 + JL LBB5_107 + MOVBLZX 0(DI), AX + MOVBLZX 1(DI), CX + MOVB 0(CX)(R9*1), CX + ADDB CX, CX + ORB 0(AX)(R9*1), CX + MOVBLZX 2(DI), AX + MOVB 0(AX)(R9*1), R10 + SHLB $2, R10 + ORB CX, R10 + MOVBLZX 3(DI), AX + MOVB 0(AX)(R9*1), AX + SHLB $3, AX + ORB R10, AX + MOVL 0(DI), CX + MOVL CX, 0(BX) + JNE LBB5_121 + ADDQ $4, BX + ADDQ $4, DI + ADDQ $-4, SI + +LBB5_107: + TESTQ SI, SI + JLE LBB5_123 + +LBB5_108: + MOVBLZX 0(DI), AX + CMPB 0(AX)(R9*1), $0 + JNE LBB5_118 + ADDQ $1, DI + MOVB AX, 0(BX) + ADDQ $1, BX + LEAQ -1(SI), AX + CMPQ SI, $1 + MOVQ AX, SI + JG LBB5_108 + JMP LBB5_123 -LBB5_85: - SUBQ DX, R8 - MOVQ R8, 0(R15) - NOTQ AX - ADDQ DI, AX +LBB5_95: + CMPQ SI, $32 + JL LBB5_110 + MOVQ SI, CX -LBB5_86: - ADDQ $16, SP +LBB5_97: + LONG $0x076ffec5 // vmovdqu (%rdi), %ymm0 + LONG $0xc864adc5 // vpcmpgtb %ymm0, %ymm10, %ymm1 + LONG $0xd074b5c5 // vpcmpeqb %ymm0, %ymm9, %ymm2 + LONG $0xd874a5c5 // vpcmpeqb %ymm0, %ymm11, %ymm3 + LONG $0xd2ebe5c5 // vpor %ymm2, %ymm3, %ymm2 + LONG $0x037ffec5 // vmovdqu %ymm0, (%rbx) + LONG $0x647dc1c4; BYTE $0xc0 // vpcmpgtb %ymm8, %ymm0, %ymm0 + LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 + LONG 
$0xc0ebedc5 // vpor %ymm0, %ymm2, %ymm0 + LONG $0xc0d7fdc5 // vpmovmskb %ymm0, %eax + TESTL AX, AX + JNE LBB5_116 + ADDQ $32, DI + ADDQ $32, BX + LEAQ -32(CX), SI + CMPQ CX, $63 + MOVQ SI, CX + JG LBB5_97 + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + QUAD $0xfffff75f1d6f7ec5 // vmovdqu $-2209(%rip), %ymm11 /* LCPI5_2(%rip) */ + QUAD $0xfffff7370d6f7ec5 // vmovdqu $-2249(%rip), %ymm9 /* LCPI5_1(%rip) */ + QUAD $0xfffff70f156f7ec5 // vmovdqu $-2289(%rip), %ymm10 /* LCPI5_0(%rip) */ + LEAQ 32(SI), AX + CMPQ AX, $48 + QUAD $0xfffff75f3d6ffac5 // vmovdqu $-2209(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffff7672d6ffac5 // vmovdqu $-2201(%rip), %xmm5 /* LCPI5_4(%rip) */ + QUAD $0xfffff76f256ffac5 // vmovdqu $-2193(%rip), %xmm4 /* LCPI5_5(%rip) */ + LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 + JGE LBB5_111 + JMP LBB5_100 + +LBB5_110: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + LONG $0x763d41c4; BYTE $0xc0 // vpcmpeqd %ymm8, %ymm8, %ymm8 + QUAD $0xfffff7141d6f7ec5 // vmovdqu $-2284(%rip), %ymm11 /* LCPI5_2(%rip) */ + QUAD $0xfffff6ec0d6f7ec5 // vmovdqu $-2324(%rip), %ymm9 /* LCPI5_1(%rip) */ + QUAD $0xfffff6c4156f7ec5 // vmovdqu $-2364(%rip), %ymm10 /* LCPI5_0(%rip) */ + QUAD $0xfffff71c3d6ffac5 // vmovdqu $-2276(%rip), %xmm7 /* LCPI5_3(%rip) */ + QUAD $0xfffff7242d6ffac5 // vmovdqu $-2268(%rip), %xmm5 /* LCPI5_4(%rip) */ + QUAD $0xfffff72c256ffac5 // vmovdqu $-2260(%rip), %xmm4 /* LCPI5_5(%rip) */ + LONG $0xf676c9c5 // vpcmpeqd %xmm6, %xmm6, %xmm6 + +LBB5_111: + MOVQ SI, CX + NEGQ CX + ADDQ $16, SI + +LBB5_112: + LONG $0x076ffac5 // vmovdqu (%rdi), %xmm0 + LONG $0xc864c1c5 // vpcmpgtb %xmm0, %xmm7, %xmm1 + LONG $0xd574f9c5 // vpcmpeqb %xmm5, %xmm0, %xmm2 + LONG $0xdc74f9c5 // vpcmpeqb %xmm4, %xmm0, %xmm3 + LONG $0xd2ebe1c5 // vpor %xmm2, %xmm3, %xmm2 + LONG $0x037ffac5 // vmovdqu %xmm0, (%rbx) + LONG $0xc664f9c5 // vpcmpgtb %xmm6, %xmm0, %xmm0 + LONG $0xc0dbf1c5 // vpand %xmm0, %xmm1, %xmm0 + LONG $0xc0ebe9c5 // vpor %xmm0, %xmm2, %xmm0 + LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax + TESTL AX, AX + JNE LBB5_115 + ADDQ $16, DI + ADDQ $16, BX + ADDQ $16, CX + ADDQ $-16, SI + CMPQ SI, $31 + JG LBB5_112 + NEGQ CX + MOVQ CX, SI + CMPQ SI, $8 + JGE LBB5_101 + JMP LBB5_104 + +LBB5_115: + BSFW AX, AX + MOVWLZX AX, AX + ADDQ AX, DI + ADDQ AX, CX + NEGQ CX + JMP LBB5_117 + +LBB5_116: + BSFL AX, AX + ADDQ AX, DI + SUBQ AX, CX + +LBB5_117: + ADDQ AX, BX + MOVQ CX, SI + +LBB5_118: + MOVB 0(DI), AX + +LBB5_119: + MOVQ BX, CX + MOVBLZX AX, AX + SHLQ $4, AX + MOVLQSX 0(R8)(AX*1), BX + MOVQ 8(R8)(AX*1), AX + MOVQ AX, 0(CX) + ADDQ CX, BX + CMPQ SI, $2 + JL LBB5_123 + MOVBLZX 1(DI), AX + ADDQ $1, DI + ADDQ $-1, SI + CMPB 0(AX)(R9*1), $0 + JNE LBB5_119 + JMP LBB5_94 + +LBB5_121: + MOVBLZX AX, AX + BSFL AX, AX + ADDQ AX, DI + SUBQ AX, SI + ADDQ AX, BX + JMP LBB5_118 + +LBB5_122: + MOVBLZX AX, AX + BSFL AX, AX + LEAQ 4(AX), CX + ADDQ AX, DI + ADDQ $4, DI + SUBQ CX, SI + ADDQ AX, BX + ADDQ $4, BX + JMP LBB5_118 + +LBB5_123: + SUBQ DX, BX + MOVQ BX, 0(R15) + JMP LBB5_127 + +LBB5_124: + ADDQ R13, R11 + MOVQ -64(BP), DX + +LBB5_125: + SUBQ DX, R9 + MOVQ R9, 0(R15) + SUBQ DI, R11 + MOVQ R11, R14 + JMP LBB5_127 + +LBB5_126: + SUBQ -64(BP), R9 + MOVQ R9, 0(R15) + SUBQ R11, DI + NOTQ R13 + ADDQ DI, R13 + MOVQ R13, R14 + +LBB5_127: + MOVQ R14, AX + ADDQ $24, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -2113,15 +2284,16 @@ LBB5_86: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB5_87: - SUBQ DX, R8 - NOTQ R9 - ADDQ R9, R8 - MOVQ 
R8, 0(R15) - SUBQ DI, AX - ADDQ R9, AX +LBB5_128: + MOVQ -64(BP), AX + ADDQ R13, AX NOTQ AX - JMP LBB5_86 + ADDQ R9, AX + MOVQ AX, 0(R15) + SUBQ R11, DI + ADDQ R13, DI + MOVQ DI, R14 + JMP LBB5_127 LCPI6_0: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' @@ -2140,693 +2312,886 @@ _unquote: BYTE $0x53 // pushq %rbx SUBQ $24, SP TESTQ SI, SI - JE LBB6_2 + JE LBB6_125 MOVQ CX, -48(BP) - MOVL R8, R10 - ANDL $1, R10 - QUAD $0xffffffa70d6ffec5 // vmovdqu $-89(%rip), %ymm1 /* LCPI6_0(%rip) */ - QUAD $0xffffffbf156ffac5 // vmovdqu $-65(%rip), %xmm2 /* LCPI6_1(%rip) */ - MOVQ DI, R9 + MOVQ R8, AX + MOVQ R8, -64(BP) + MOVL R8, R9 + ANDL $1, R9 + QUAD $0xffffff9c0d6ffec5 // vmovdqu $-100(%rip), %ymm1 /* LCPI6_0(%rip) */ + QUAD $0xffffffb4156ffac5 // vmovdqu $-76(%rip), %xmm2 /* LCPI6_1(%rip) */ + MOVQ DI, R11 MOVQ SI, R13 - MOVQ DX, AX - JMP LBB6_8 - -LBB6_2: - XORL R13, R13 - MOVQ DX, AX - -LBB6_3: - ADDQ R13, AX - SUBQ DX, AX - -LBB6_4: - ADDQ $24, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - WORD $0xf8c5; BYTE $0x77 // vzeroupper - RET - -LBB6_5: - LEAQ 4(R9)(R12*1), R9 - MOVL R15, CX - SHRL $6, CX - ORB $-64, CX - MOVB CX, 0(AX) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 1(AX) - ADDQ $2, AX - -LBB6_6: - MOVQ R14, R13 - -LBB6_7: - TESTQ R13, R13 - JE LBB6_101 + MOVQ DX, R8 -LBB6_8: - CMPB 0(R9), $92 - JNE LBB6_10 - XORL BX, BX - JMP LBB6_24 +LBB6_22: + CMPB 0(R11), $92 + JNE LBB6_24 + XORL R14, R14 + JMP LBB6_41 -LBB6_10: - MOVQ R13, R12 - MOVQ AX, R15 - MOVQ R9, R14 +LBB6_24: + MOVQ R13, R15 + MOVQ R8, AX + MOVQ R11, R14 CMPQ R13, $32 - JL LBB6_14 - MOVQ R9, R14 - MOVQ AX, R15 - MOVQ R13, R12 + JL LBB6_29 + XORL AX, AX + MOVQ R13, BX -LBB6_12: - LONG $0x6f7ec1c4; BYTE $0x06 // vmovdqu (%r14), %ymm0 - LONG $0x7f7ec1c4; BYTE $0x07 // vmovdqu %ymm0, (%r15) - LONG $0xc174fdc5 // vpcmpeqb %ymm1, %ymm0, %ymm0 - LONG $0xd8d7fdc5 // vpmovmskb %ymm0, %ebx - TESTL BX, BX - JNE LBB6_22 - ADDQ $32, R14 - ADDQ $32, R15 - CMPQ R12, $63 - LEAQ -32(R12), R12 - JG LBB6_12 +LBB6_26: + LONG $0x6f7ec1c4; WORD $0x0304 // vmovdqu (%r11,%rax), %ymm0 + LONG $0x7f7ec1c4; WORD $0x0004 // vmovdqu %ymm0, (%r8,%rax) + LONG $0xc174fdc5 // vpcmpeqb %ymm1, %ymm0, %ymm0 + LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx + TESTL CX, CX + JNE LBB6_39 + LEAQ -32(BX), R15 + ADDQ $32, AX + CMPQ BX, $63 + MOVQ R15, BX + JG LBB6_26 + LEAQ 0(R11)(AX*1), R14 + ADDQ R8, AX -LBB6_14: +LBB6_29: WORD $0xf8c5; BYTE $0x77 // vzeroupper - QUAD $0xffffff0d156ffac5 // vmovdqu $-243(%rip), %xmm2 /* LCPI6_1(%rip) */ - CMPQ R12, $16 - JL LBB6_17 - -LBB6_15: + CMPQ R15, $16 + JL LBB6_33 + MOVQ R11, R12 + SUBQ R14, R12 + QUAD $0xffffff1e0d6ffec5 // vmovdqu $-226(%rip), %ymm1 /* LCPI6_0(%rip) */ + QUAD $0xffffff36156ffac5 // vmovdqu $-202(%rip), %xmm2 /* LCPI6_1(%rip) */ + +LBB6_31: LONG $0x6f7ac1c4; BYTE $0x06 // vmovdqu (%r14), %xmm0 - LONG $0x7f7ac1c4; BYTE $0x07 // vmovdqu %xmm0, (%r15) + LONG $0x007ffac5 // vmovdqu %xmm0, (%rax) LONG $0xc274f9c5 // vpcmpeqb %xmm2, %xmm0, %xmm0 LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - TESTW BX, BX - JNE LBB6_23 + TESTL BX, BX + JNE LBB6_40 ADDQ $16, R14 - ADDQ $16, R15 - CMPQ R12, $31 - LEAQ -16(R12), R12 - JG LBB6_15 + ADDQ $16, AX + LEAQ -16(R15), R10 + ADDQ $-16, R12 + CMPQ R15, $31 + MOVQ R10, R15 + JG LBB6_31 + JMP LBB6_34 + +LBB6_33: + MOVQ R15, R10 + QUAD $0xfffffedb0d6ffec5 // vmovdqu $-293(%rip), %ymm1 /* LCPI6_0(%rip) */ + QUAD 
$0xfffffef3156ffac5 // vmovdqu $-269(%rip), %xmm2 /* LCPI6_1(%rip) */ + +LBB6_34: + TESTQ R10, R10 + JE LBB6_126 + XORL BX, BX -LBB6_17: - TESTQ R12, R12 - JE LBB6_3 - XORL BX, BX - QUAD $0xfffffeaa0d6ffec5 // vmovdqu $-342(%rip), %ymm1 /* LCPI6_0(%rip) */ - -LBB6_19: - MOVBLZX 0(R14)(BX*1), R11 - CMPB R11, $92 - JE LBB6_21 - MOVB R11, 0(R15)(BX*1) - INCQ BX - CMPQ R12, BX - JNE LBB6_19 - JMP LBB6_3 - -LBB6_21: +LBB6_36: + MOVBLZX 0(R14)(BX*1), CX + CMPB CX, $92 + JE LBB6_38 + MOVB CX, 0(AX)(BX*1) + ADDQ $1, BX + CMPQ R10, BX + JNE LBB6_36 + JMP LBB6_126 + +LBB6_38: + SUBQ R11, R14 ADDQ BX, R14 - SUBQ R9, R14 - MOVQ R14, BX - CMPQ BX, $-1 - JNE LBB6_24 - JMP LBB6_3 - -LBB6_22: - MOVLQSX BX, BX - SUBQ R9, R14 - BSFQ BX, BX - ADDQ R14, BX - CMPQ BX, $-1 - JNE LBB6_24 - JMP LBB6_3 - -LBB6_23: - MOVWLZX BX, BX - SUBQ R9, R14 - BSFQ BX, BX - ADDQ R14, BX - QUAD $0xfffffe4d0d6ffec5 // vmovdqu $-435(%rip), %ymm1 /* LCPI6_0(%rip) */ - CMPQ BX, $-1 - JE LBB6_3 - -LBB6_24: - LEAQ 2(BX), CX - SUBQ CX, R13 - JS LBB6_99 - LEAQ 2(R9)(BX*1), R9 - TESTQ R10, R10 - JNE LBB6_67 + CMPQ R14, $-1 + JNE LBB6_41 + JMP LBB6_126 -LBB6_26: - ADDQ BX, AX - MOVBLZX -1(R9), CX - LONG $0x551d8d48; WORD $0x00c1; BYTE $0x00 // leaq $49493(%rip), %rbx /* __UnquoteTab(%rip) */ - MOVB 0(CX)(BX*1), BX - CMPB BX, $-1 - JE LBB6_29 - TESTB BX, BX - JE LBB6_100 - MOVB BX, 0(AX) - INCQ AX - JMP LBB6_7 +LBB6_39: + BSFL CX, R14 + ADDQ AX, R14 + CMPQ R14, $-1 + JNE LBB6_41 + JMP LBB6_126 -LBB6_29: +LBB6_40: + BSFW BX, AX + MOVWLZX AX, R14 + SUBQ R12, R14 + CMPQ R14, $-1 + JE LBB6_126 + +LBB6_41: + LEAQ 2(R14), AX + SUBQ AX, R13 + JS LBB6_164 + ADDQ R14, R11 + ADDQ $2, R11 + TESTQ R9, R9 + JNE LBB6_55 + +LBB6_43: + ADDQ R14, R8 + MOVBLZX -1(R11), AX + LONG $0xca0d8d48; WORD $0x00bf; BYTE $0x00 // leaq $49098(%rip), %rcx /* __UnquoteTab(%rip) */ + MOVB 0(AX)(CX*1), AX + CMPB AX, $-1 + JE LBB6_46 + TESTB AX, AX + JE LBB6_142 + MOVB AX, 0(R8) + ADDQ $1, R8 + TESTQ R13, R13 + JNE LBB6_22 + JMP LBB6_141 + +LBB6_46: CMPQ R13, $3 - JLE LBB6_99 - MOVL 0(R9), R14 - MOVL R14, R15 - NOTL R15 - LEAL -808464432(R14), CX - ANDL $-2139062144, R15 - TESTL CX, R15 - JNE LBB6_90 - LEAL 421075225(R14), CX - ORL R14, CX - TESTL $-2139062144, CX - JNE LBB6_90 + JLE LBB6_164 + MOVL 0(R11), R14 MOVL R14, BX - ANDL $2139062143, BX - MOVL $-1061109568, CX - SUBL BX, CX - LEAL 1179010630(BX), R11 - ANDL R15, CX - TESTL R11, CX - JNE LBB6_90 - MOVL $-522133280, CX - SUBL BX, CX - ADDL $960051513, BX - ANDL CX, R15 - TESTL BX, R15 - JNE LBB6_90 - BSWAPL R14 - MOVL R14, CX - SHRL $4, CX - NOTL CX - ANDL $16843009, CX - LEAL 0(CX)(CX*8), CX - ANDL $252645135, R14 - ADDL CX, R14 - MOVL R14, CX - SHRL $4, CX - ORL R14, CX - MOVBLZX CX, R15 - SHRL $8, CX - ANDL $65280, CX - ORL CX, R15 - LEAQ -4(R13), R14 - CMPL R15, $128 - JB LBB6_75 - XORL R12, R12 - TESTQ R10, R10 - JE LBB6_51 - -LBB6_36: - CMPL R15, $2048 - JB LBB6_5 - MOVL R15, CX - ANDL $-2048, CX - CMPL CX, $55296 - JNE LBB6_65 - TESTQ R14, R14 - JLE LBB6_80 - CMPB 4(R9)(R12*1), $92 - JNE LBB6_81 - CMPL R15, $56319 - JA LBB6_78 - CMPQ R14, $7 - JL LBB6_78 - CMPB 5(R9)(R12*1), $92 - JNE LBB6_78 - CMPB 6(R9)(R12*1), $117 - JNE LBB6_78 - MOVL 7(R9)(R12*1), R11 - MOVL R11, BX NOTL BX - LEAL -808464432(R11), CX + LEAL -808464432(R14), AX ANDL $-2139062144, BX - MOVL BX, -52(BP) - TESTL CX, BX - JNE LBB6_104 - LEAL 421075225(R11), CX - ORL R11, CX - TESTL $-2139062144, CX - JNE LBB6_104 - MOVL R11, BX - ANDL $2139062143, BX + TESTL AX, BX + JNE LBB6_128 + LEAL 421075225(R14), AX + ORL R14, AX + TESTL $-2139062144, 
AX + JNE LBB6_128 + MOVL R14, AX + ANDL $2139062143, AX MOVL $-1061109568, CX - SUBL BX, CX - MOVQ R11, -64(BP) - LEAL 1179010630(BX), R11 - ANDL -52(BP), CX - TESTL R11, CX - MOVQ -64(BP), R11 - JNE LBB6_104 + SUBL AX, CX + MOVQ DX, R15 + LEAL 1179010630(AX), DX + ANDL BX, CX + TESTL DX, CX + JNE LBB6_128 MOVL $-522133280, CX - SUBL BX, CX - MOVL CX, -56(BP) - ADDL $960051513, BX - MOVL -52(BP), CX - ANDL -56(BP), CX - TESTL BX, CX - JNE LBB6_104 - BSWAPL R11 - MOVL R11, CX - SHRL $4, CX - NOTL CX - ANDL $16843009, CX - LEAL 0(CX)(CX*8), CX - ANDL $252645135, R11 - ADDL CX, R11 - MOVL R11, CX - SHRL $4, CX - ORL R11, CX - MOVL CX, BX - SHRL $8, BX - ANDL $65280, BX - MOVBLZX CX, R11 - ORL BX, R11 - ANDL $16515072, CX - CMPL CX, $14417920 - JE LBB6_85 - TESTB $2, R8 - JE LBB6_119 - ADDQ $-7, R14 - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - ADDQ $3, AX - ADDQ $7, R12 - MOVL R11, R15 - CMPL R11, $127 - JA LBB6_36 - JMP LBB6_64 - -LBB6_51: - CMPL R15, $2048 - JB LBB6_5 - MOVL R15, CX - ANDL $-2048, CX - CMPL CX, $55296 + SUBL AX, CX + ADDL $960051513, AX + ANDL CX, BX + TESTL AX, BX + JNE LBB6_128 + MOVQ R15, DX + BSWAPL R14 + MOVL R14, AX + SHRL $4, AX + NOTL AX + ANDL $16843009, AX + LEAL 0(AX)(AX*8), AX + ANDL $252645135, R14 + ADDL AX, R14 + MOVL R14, R10 + SHRL $4, R10 + ORL R14, R10 + MOVL R10, AX + SHRL $8, AX + ANDL $65280, AX + MOVBLZX R10, R14 + ORL AX, R14 + LEAQ 4(R11), R12 + LEAQ -4(R13), CX + CMPL R14, $128 + JB LBB6_63 + TESTQ R9, R9 JNE LBB6_65 - CMPL R15, $56319 - JA LBB6_77 - CMPQ R14, $6 - JL LBB6_77 - CMPB 4(R9)(R12*1), $92 - JNE LBB6_77 - CMPB 5(R9)(R12*1), $117 - JNE LBB6_77 - MOVL 6(R9)(R12*1), R11 - MOVL R11, BX - NOTL BX - LEAL -808464432(R11), CX - ANDL $-2139062144, BX - MOVL BX, -52(BP) - TESTL CX, BX - JNE LBB6_103 - LEAL 421075225(R11), CX - ORL R11, CX - TESTL $-2139062144, CX - JNE LBB6_103 - MOVL R11, BX - ANDL $2139062143, BX - MOVL $-1061109568, CX - SUBL BX, CX - MOVQ R11, -64(BP) - LEAL 1179010630(BX), R11 - ANDL -52(BP), CX - TESTL R11, CX - MOVQ -64(BP), R11 - JNE LBB6_103 - MOVL $-522133280, CX - SUBL BX, CX - MOVL CX, -56(BP) - ADDL $960051513, BX - MOVL -52(BP), CX - ANDL -56(BP), CX - TESTL BX, CX - JNE LBB6_103 - BSWAPL R11 - MOVL R11, CX - SHRL $4, CX - NOTL CX - ANDL $16843009, CX - LEAL 0(CX)(CX*8), CX - ANDL $252645135, R11 - ADDL CX, R11 - MOVL R11, CX - SHRL $4, CX - ORL R11, CX - MOVL CX, BX - SHRL $8, BX - ANDL $65280, BX - MOVBLZX CX, R11 - ORL BX, R11 - ANDL $16515072, CX - CMPL CX, $14417920 - JE LBB6_84 - TESTB $2, R8 - JE LBB6_118 - ADDQ $-6, R14 - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - ADDQ $3, AX - ADDQ $6, R12 - MOVL R11, R15 - CMPL R11, $128 - JAE LBB6_51 + TESTB $2, -64(BP) + JE LBB6_86 + XORL BX, BX -LBB6_64: - LEAQ 4(R9)(R12*1), R9 - MOVL R11, R15 - JMP LBB6_76 - -LBB6_65: - LEAQ 4(R9)(R12*1), R9 - MOVL R15, CX - SHRL $12, CX - ORB $-32, CX - MOVB CX, 0(AX) - MOVL R15, CX - SHRL $6, CX - ANDB $63, CX - ORB $-128, CX - MOVB CX, 1(AX) - ANDB $63, R15 - ORB $-128, R15 - MOVB R15, 2(AX) +LBB6_2: + CMPL R14, $2048 + JB LBB6_84 + MOVL R14, AX + ANDL $-2048, AX + CMPL AX, $55296 + JNE LBB6_81 + CMPQ CX, $6 + JL LBB6_15 + CMPL R14, $56319 + JA LBB6_15 + CMPB 4(R11)(BX*1), $92 + JNE LBB6_15 + CMPB 5(R11)(BX*1), $117 + JNE LBB6_15 + MOVL 6(R11)(BX*1), R12 + MOVL R12, R10 + NOTL R10 + LEAL -808464432(R12), AX + ANDL $-2139062144, R10 + TESTL AX, R10 + JNE LBB6_143 + LEAL 421075225(R12), AX + ORL R12, AX + TESTL $-2139062144, AX + JNE LBB6_143 + MOVL R12, AX + ANDL $2139062143, AX + MOVL $-1061109568, DX + SUBL AX, DX + MOVL DX, -52(BP) + 
LEAL 1179010630(AX), DX + MOVL DX, -56(BP) + MOVL -52(BP), DX + ANDL R10, DX + TESTL DX, -56(BP) + JNE LBB6_143 + MOVL $-522133280, DX + SUBL AX, DX + ADDL $960051513, AX + ANDL DX, R10 + TESTL AX, R10 + JNE LBB6_143 + BSWAPL R12 + MOVL R12, AX + SHRL $4, AX + NOTL AX + ANDL $16843009, AX + LEAL 0(AX)(AX*8), AX + ANDL $252645135, R12 + ADDL AX, R12 + MOVL R12, AX + SHRL $4, AX + ORL R12, AX + MOVL AX, DX + SHRL $8, DX + ANDL $65280, DX + MOVBLZX AX, R10 + ORL DX, R10 + ANDL $16515072, AX + CMPL AX, $14417920 + JE LBB6_18 + MOVQ R15, DX + MOVW $-16401, 0(R8) + MOVB $-67, 2(R8) + ADDQ $3, R8 + ADDQ $6, BX + ADDQ $-6, CX + MOVL R10, R14 + CMPL R10, $127 + JA LBB6_2 -LBB6_66: - ADDQ $3, AX - JMP LBB6_6 +LBB6_14: + LEAQ 0(R11)(BX*1), R12 + ADDQ $4, R12 + JMP LBB6_64 -LBB6_67: +LBB6_55: TESTL R13, R13 - JE LBB6_99 - CMPB -1(R9), $92 - JNE LBB6_102 - CMPB 0(R9), $92 - JNE LBB6_74 + JE LBB6_164 + CMPB -1(R11), $92 + JNE LBB6_156 + CMPB 0(R11), $92 + JNE LBB6_62 CMPL R13, $1 - JLE LBB6_99 - MOVB 1(R9), R11 - CMPB R11, $34 - JE LBB6_73 - CMPB R11, $92 - JNE LBB6_114 - -LBB6_73: - INCQ R9 - DECQ R13 - -LBB6_74: - INCQ R9 - DECQ R13 - JMP LBB6_26 - -LBB6_75: - ADDQ $4, R9 - -LBB6_76: - MOVB R15, 0(AX) - INCQ AX - JMP LBB6_6 - -LBB6_77: - LEAQ 4(R9)(R12*1), R9 - JMP LBB6_79 - -LBB6_78: - LEAQ 5(R9)(R12*1), R9 - SUBQ R12, R13 - ADDQ $-5, R13 - MOVQ R13, R14 + JLE LBB6_164 + MOVB 1(R11), AX + CMPB AX, $34 + JE LBB6_61 + CMPB AX, $92 + JNE LBB6_158 + +LBB6_61: + ADDQ $1, R11 + ADDQ $-1, R13 + +LBB6_62: + ADDQ $1, R11 + ADDQ $-1, R13 + JMP LBB6_43 -LBB6_79: - TESTB $2, R8 - JNE LBB6_83 - JMP LBB6_121 +LBB6_63: + MOVL R14, R10 -LBB6_80: - TESTB $2, R8 - JNE LBB6_82 - JMP LBB6_99 +LBB6_64: + MOVB R10, 0(R8) + ADDQ $1, R8 + MOVQ CX, R13 + MOVQ R12, R11 + TESTQ R13, R13 + JNE LBB6_22 + JMP LBB6_141 + +LBB6_65: + TESTB $2, -64(BP) + JE LBB6_98 + XORL BX, BX + +LBB6_67: + CMPL R14, $2048 + JB LBB6_84 + MOVL R14, AX + ANDL $-2048, AX + CMPL AX, $55296 + JNE LBB6_81 + TESTQ CX, CX + JLE LBB6_162 + CMPB 4(R11)(BX*1), $92 + JNE LBB6_107 + CMPQ CX, $7 + JL LBB6_105 + CMPL R14, $56319 + JA LBB6_105 + CMPB 5(R11)(BX*1), $92 + JNE LBB6_105 + CMPB 6(R11)(BX*1), $117 + JNE LBB6_105 + MOVL 7(R11)(BX*1), R12 + MOVL R12, R10 + NOTL R10 + LEAL -808464432(R12), AX + ANDL $-2139062144, R10 + TESTL AX, R10 + JNE LBB6_159 + LEAL 421075225(R12), AX + ORL R12, AX + TESTL $-2139062144, AX + JNE LBB6_159 + MOVL R12, AX + ANDL $2139062143, AX + MOVL $-1061109568, DX + SUBL AX, DX + MOVL DX, -52(BP) + LEAL 1179010630(AX), DX + MOVL DX, -56(BP) + MOVL -52(BP), DX + ANDL R10, DX + TESTL DX, -56(BP) + JNE LBB6_159 + MOVL $-522133280, DX + SUBL AX, DX + ADDL $960051513, AX + ANDL DX, R10 + TESTL AX, R10 + JNE LBB6_159 + BSWAPL R12 + MOVL R12, AX + SHRL $4, AX + NOTL AX + ANDL $16843009, AX + LEAL 0(AX)(AX*8), AX + ANDL $252645135, R12 + ADDL AX, R12 + MOVL R12, AX + SHRL $4, AX + ORL R12, AX + MOVL AX, DX + SHRL $8, DX + ANDL $65280, DX + MOVBLZX AX, R10 + ORL DX, R10 + ANDL $16515072, AX + CMPL AX, $14417920 + JE LBB6_108 + MOVW $-16401, 0(R8) + MOVB $-67, 2(R8) + ADDQ $3, R8 + ADDQ $7, BX + ADDQ $-7, CX + MOVL R10, R14 + CMPL R10, $128 + MOVQ R15, DX + JAE LBB6_67 + JMP LBB6_14 LBB6_81: - TESTB $2, R8 - JE LBB6_122 + LEAQ 0(R11)(BX*1), R12 + ADDQ $4, R12 LBB6_82: - LEAQ 4(R9)(R12*1), R9 - -LBB6_83: - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - JMP LBB6_66 + MOVL R14, AX + SHRL $12, AX + ORB $-32, AX + MOVB AX, 0(R8) + MOVL R14, AX + SHRL $6, AX + ANDB $63, AX + ORB $-128, AX + MOVB AX, 1(R8) + ANDB $63, R14 + ORB $-128, R14 + MOVB R14, 
2(R8) + ADDQ $3, R8 + MOVQ CX, R13 + MOVQ R12, R11 + TESTQ R13, R13 + JNE LBB6_22 + JMP LBB6_141 LBB6_84: - LEAQ 10(R9)(R12*1), R9 - SUBQ R12, R13 - ADDQ $-10, R13 - JMP LBB6_86 + LEAQ 0(R11)(BX*1), R12 + ADDQ $4, R12 LBB6_85: - LEAQ 11(R9)(R12*1), R9 - SUBQ R12, R13 - ADDQ $-11, R13 + MOVL R14, AX + SHRL $6, AX + ORB $-64, AX + MOVB AX, 0(R8) + ANDB $63, R14 + ORB $-128, R14 + MOVB R14, 1(R8) + ADDQ $2, R8 + MOVQ CX, R13 + MOVQ R12, R11 + TESTQ R13, R13 + JNE LBB6_22 + JMP LBB6_141 LBB6_86: - SHLL $10, R15 - LEAL -56613888(R15)(R11*1), BX - CMPL BX, $1114112 - JB LBB6_89 - TESTB $2, R8 - JE LBB6_116 - MOVW $-16401, 0(AX) - MOVB $-67, 2(AX) - ADDQ $3, AX - JMP LBB6_7 - -LBB6_89: - MOVL BX, CX - SHRL $18, CX - ORB $-16, CX - MOVB CX, 0(AX) - MOVL BX, CX - SHRL $12, CX - ANDB $63, CX - ORB $-128, CX - MOVB CX, 1(AX) - MOVL BX, CX - SHRL $6, CX - ANDB $63, CX - ORB $-128, CX - MOVB CX, 2(AX) - ANDB $63, BX - ORB $-128, BX - MOVB BX, 3(AX) - ADDQ $4, AX - JMP LBB6_7 + CMPL R14, $2048 + JB LBB6_85 + ANDL $16252928, R10 + CMPL R10, $14155776 + JNE LBB6_82 + CMPQ R13, $10 + JL LBB6_101 + CMPL R14, $56319 + JA LBB6_101 + CMPB 0(R12), $92 + JNE LBB6_101 + CMPB 5(R11), $117 + JNE LBB6_101 + MOVL 6(R11), R10 + MOVL R10, BX + NOTL BX + LEAL -808464432(R10), AX + ANDL $-2139062144, BX + TESTL AX, BX + JNE LBB6_144 + LEAL 421075225(R10), AX + ORL R10, AX + TESTL $-2139062144, AX + JNE LBB6_144 + MOVL R10, AX + ANDL $2139062143, AX + MOVL $-1061109568, CX + SUBL AX, CX + LEAL 1179010630(AX), DX + ANDL BX, CX + TESTL DX, CX + JNE LBB6_144 + MOVL $-522133280, CX + SUBL AX, CX + ADDL $960051513, AX + ANDL CX, BX + TESTL AX, BX + JNE LBB6_144 + BSWAPL R10 + MOVL R10, AX + SHRL $4, AX + NOTL AX + ANDL $16843009, AX + LEAL 0(AX)(AX*8), AX + ANDL $252645135, R10 + ADDL AX, R10 + MOVL R10, AX + SHRL $4, AX + ORL R10, AX + ADDQ $10, R11 + MOVL AX, CX + ANDL $16515072, CX + CMPL CX, $14417920 + JNE LBB6_124 + MOVL AX, CX + SHRL $8, CX + ANDL $65280, CX + MOVBLZX AX, R10 + ORL CX, R10 + JMP LBB6_19 -LBB6_90: - MOVQ R9, DX - SUBQ DI, DX - MOVQ -48(BP), DI - MOVQ DX, 0(DI) - MOVB 0(R9), SI - LEAL -48(SI), CX - MOVQ $-2, AX - CMPB CX, $10 - JB LBB6_92 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_4 - -LBB6_92: - LEAQ 1(DX), CX - MOVQ CX, 0(DI) - MOVB 1(R9), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_94 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_4 - -LBB6_94: - LEAQ 2(DX), CX - MOVQ CX, 0(DI) - MOVB 2(R9), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_96 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_4 - -LBB6_96: - LEAQ 3(DX), CX - MOVQ CX, 0(DI) - MOVB 3(R9), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_98 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_4 +LBB6_15: + ADDQ BX, R11 + ADDQ $4, R11 -LBB6_98: - ADDQ $4, DX - MOVQ DX, 0(DI) - JMP LBB6_4 +LBB6_16: + TESTB $2, -64(BP) + JE LBB6_160 -LBB6_99: - MOVQ -48(BP), AX - MOVQ SI, 0(AX) - MOVQ $-1, AX - JMP LBB6_4 +LBB6_17: + MOVW $-16401, 0(R8) + MOVB $-67, 2(R8) + ADDQ $3, R8 + MOVQ CX, R13 + JMP LBB6_21 -LBB6_100: - NOTQ DI - ADDQ DI, R9 - MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-3, AX - JMP LBB6_4 +LBB6_18: + ADDQ BX, R11 + ADDQ $10, R11 + SUBQ BX, R13 -LBB6_101: - XORL R13, R13 - JMP LBB6_3 +LBB6_19: + ADDQ $-10, R13 -LBB6_102: - NOTQ DI - ADDQ DI, R9 - JMP LBB6_115 +LBB6_20: + SHLL $10, R14 + MOVL R10, AX + ADDL R14, AX + LEAL 0(R10)(R14*1), CX + ADDL $-56613888, CX + MOVL CX, DX + SHRL $18, DX + ORB $-16, DX + MOVB DX, 0(R8) + MOVL CX, DX + SHRL $12, DX + ANDB $63, DX + ORB $-128, DX + MOVB DX, 1(R8) + SHRL $6, 
CX + ANDB $63, CX + ORB $-128, CX + MOVB CX, 2(R8) + ANDB $63, AX + ORB $-128, AX + MOVB AX, 3(R8) + ADDQ $4, R8 -LBB6_103: - LEAQ 4(R9)(R12*1), SI - JMP LBB6_105 +LBB6_21: + MOVQ R15, DX + TESTQ R13, R13 + JNE LBB6_22 + JMP LBB6_141 -LBB6_104: - LEAQ 5(R9)(R12*1), SI +LBB6_98: + CMPL R14, $2048 + JB LBB6_110 + ANDL $16252928, R10 + CMPL R10, $14155776 + JE LBB6_111 + MOVQ R15, DX + JMP LBB6_82 + +LBB6_101: + MOVQ R12, R11 + JMP LBB6_16 LBB6_105: - MOVQ SI, DX - SUBQ DI, DX - ADDQ $2, DX - MOVQ -48(BP), AX - MOVQ DX, 0(AX) - MOVB 2(SI), DI - LEAL -48(DI), CX - MOVQ $-2, AX - CMPB CX, $10 - JB LBB6_107 - ANDB $-33, DI - ADDB $-65, DI - CMPB DI, $5 - JA LBB6_4 + LEAQ 0(R11)(BX*1), R12 + ADDQ $5, R12 + +LBB6_106: + ADDQ $-1, CX + MOVQ R12, R11 + JMP LBB6_16 LBB6_107: - LEAQ 1(DX), CX - MOVQ -48(BP), DI - MOVQ CX, 0(DI) - MOVB 3(SI), DI - LEAL -48(DI), CX - CMPB CX, $10 - JB LBB6_109 - ANDB $-33, DI - ADDB $-65, DI - CMPB DI, $5 - JA LBB6_4 + ADDQ BX, R11 + ADDQ $4, R11 + TESTB $2, -64(BP) + JNE LBB6_17 + JMP LBB6_124 + +LBB6_108: + ADDQ BX, R11 + ADDQ $11, R11 + SUBQ BX, R13 LBB6_109: - LEAQ 2(DX), CX - MOVQ -48(BP), DI - MOVQ CX, 0(DI) - MOVB 4(SI), DI - LEAL -48(DI), CX - CMPB CX, $10 - JB LBB6_111 - ANDB $-33, DI - ADDB $-65, DI - CMPB DI, $5 - JA LBB6_4 + ADDQ $-11, R13 + JMP LBB6_20 + +LBB6_110: + MOVQ R15, DX + JMP LBB6_85 LBB6_111: - LEAQ 3(DX), CX - MOVQ -48(BP), DI - MOVQ CX, 0(DI) - MOVB 5(SI), SI - LEAL -48(SI), CX - CMPB CX, $10 - JB LBB6_113 - ANDB $-33, SI - ADDB $-65, SI - CMPB SI, $5 - JA LBB6_4 + CMPQ R13, $5 + JL LBB6_162 + CMPB 0(R12), $92 + JNE LBB6_123 + LEAQ 5(R11), R12 + CMPQ R13, $11 + JL LBB6_106 + CMPL R14, $56319 + JA LBB6_106 + CMPB 0(R12), $92 + JNE LBB6_106 + CMPB 6(R11), $117 + JNE LBB6_106 + MOVL 7(R11), R10 + MOVL R10, BX + NOTL BX + LEAL -808464432(R10), AX + ANDL $-2139062144, BX + TESTL AX, BX + JNE LBB6_144 + LEAL 421075225(R10), AX + ORL R10, AX + TESTL $-2139062144, AX + JNE LBB6_144 + MOVL R10, AX + ANDL $2139062143, AX + MOVL $-1061109568, CX + SUBL AX, CX + LEAL 1179010630(AX), DX + ANDL BX, CX + TESTL DX, CX + JNE LBB6_144 + MOVL $-522133280, CX + SUBL AX, CX + ADDL $960051513, AX + ANDL CX, BX + TESTL AX, BX + JNE LBB6_144 + BSWAPL R10 + MOVL R10, AX + SHRL $4, AX + NOTL AX + ANDL $16843009, AX + LEAL 0(AX)(AX*8), AX + ANDL $252645135, R10 + ADDL AX, R10 + MOVL R10, AX + SHRL $4, AX + ORL R10, AX + ADDQ $11, R11 + MOVL AX, CX + ANDL $16515072, CX + CMPL CX, $14417920 + JNE LBB6_124 + MOVL AX, CX + SHRL $8, CX + ANDL $65280, CX + MOVBLZX AX, R10 + ORL CX, R10 + JMP LBB6_109 -LBB6_113: - ADDQ $4, DX - MOVQ -48(BP), CX - MOVQ DX, 0(CX) - JMP LBB6_4 +LBB6_123: + MOVQ R12, R11 + TESTB $2, -64(BP) + JNE LBB6_17 + +LBB6_124: + SUBQ DI, R11 + JMP LBB6_161 + +LBB6_125: + XORL R13, R13 + MOVQ DX, R8 + +LBB6_126: + ADDQ R13, R8 + SUBQ DX, R8 + +LBB6_127: + MOVQ R8, AX + ADDQ $24, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + WORD $0xf8c5; BYTE $0x77 // vzeroupper + RET + +LBB6_128: + MOVQ R11, AX + SUBQ DI, AX + MOVQ -48(BP), SI + MOVQ AX, 0(SI) + MOVB 0(R11), CX + LEAL -48(CX), DX + CMPB DX, $10 + JB LBB6_131 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + JAE LBB6_127 -LBB6_114: - SUBQ DI, R9 - INCQ R9 +LBB6_131: + LEAQ 1(AX), CX + MOVQ CX, 0(SI) + MOVB 1(R11), CX + LEAL -48(CX), DX + CMPB DX, $9 + JBE LBB6_134 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 
+ MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + JAE LBB6_127 -LBB6_115: +LBB6_134: + LEAQ 2(AX), CX + MOVQ CX, 0(SI) + MOVB 2(R11), CX + LEAL -48(CX), DX + CMPB DX, $10 + JB LBB6_137 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + JAE LBB6_127 + +LBB6_137: + LEAQ 3(AX), CX + MOVQ CX, 0(SI) + MOVB 3(R11), CX + LEAL -48(CX), DX + CMPB DX, $10 + JB LBB6_140 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + JAE LBB6_127 + +LBB6_140: + ADDQ $4, AX + MOVQ AX, 0(SI) + MOVQ $-2, R8 + JMP LBB6_127 + +LBB6_141: + XORL R13, R13 + JMP LBB6_126 + +LBB6_142: + NOTQ DI + ADDQ DI, R11 MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-2, AX - JMP LBB6_4 + MOVQ R11, 0(AX) + MOVQ $-3, R8 + JMP LBB6_127 -LBB6_116: - SUBQ DI, R9 - ADDQ $-4, R9 +LBB6_143: + LEAQ 0(R11)(BX*1), R12 + ADDQ $4, R12 + +LBB6_144: + MOVQ R12, AX + SUBQ DI, AX + ADDQ $2, AX + MOVQ -48(BP), SI + MOVQ AX, 0(SI) + MOVB 2(R12), CX + LEAL -48(CX), DX + CMPB DX, $10 + JB LBB6_147 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + MOVQ -48(BP), SI + JAE LBB6_127 -LBB6_117: +LBB6_147: + LEAQ 1(AX), CX + MOVQ CX, 0(SI) + MOVB 3(R12), CX + LEAL -48(CX), DX + CMPB DX, $9 + JBE LBB6_150 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + MOVQ -48(BP), SI + JAE LBB6_127 + +LBB6_150: + LEAQ 2(AX), CX + MOVQ CX, 0(SI) + MOVB 4(R12), CX + LEAL -48(CX), DX + CMPB DX, $10 + JB LBB6_153 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + MOVQ -48(BP), SI + JAE LBB6_127 + +LBB6_153: + LEAQ 3(AX), CX + MOVQ CX, 0(SI) + MOVB 5(R12), CX + LEAL -48(CX), DX + CMPB DX, $10 + JB LBB6_140 + MOVQ $-2, R8 + ADDB $-65, CX + CMPB CX, $37 + JA LBB6_127 + MOVBLZX CX, CX + MOVQ $270582939711, DX + BTQ CX, DX + MOVQ -48(BP), SI + JB LBB6_140 + JMP LBB6_127 + +LBB6_156: + NOTQ DI + ADDQ DI, R11 + +LBB6_157: MOVQ -48(BP), AX - MOVQ R9, 0(AX) - MOVQ $-4, AX - JMP LBB6_4 + MOVQ R11, 0(AX) + MOVQ $-2, R8 + JMP LBB6_127 -LBB6_118: - LEAQ 10(R9)(R12*1), AX - JMP LBB6_120 +LBB6_158: + SUBQ DI, R11 + ADDQ $1, R11 + JMP LBB6_157 -LBB6_119: - LEAQ 11(R9)(R12*1), AX +LBB6_159: + LEAQ 0(R11)(BX*1), R12 + ADDQ $5, R12 + JMP LBB6_144 -LBB6_120: - SUBQ DI, AX - ADDQ $-4, AX - MOVQ -48(BP), CX - MOVQ AX, 0(CX) - MOVQ $-4, AX - JMP LBB6_4 +LBB6_160: + ADDQ DI, R9 + SUBQ R9, R11 -LBB6_121: - LEAQ 4(R10)(DI*1), AX - SUBQ AX, R9 - JMP LBB6_117 +LBB6_161: + ADDQ $-4, R11 + MOVQ -48(BP), AX + MOVQ R11, 0(AX) + MOVQ $-4, R8 + JMP LBB6_127 + +LBB6_162: + TESTB $2, -64(BP) + JE LBB6_164 + MOVW $-16401, 0(R8) + MOVB $-67, 2(R8) + ADDQ $3, R8 + XORL R13, R13 + MOVQ R15, DX + JMP LBB6_126 -LBB6_122: - ADDQ R12, R9 - SUBQ DI, R9 - JMP LBB6_117 +LBB6_164: + MOVQ -48(BP), AX + MOVQ SI, 0(AX) + MOVQ $-1, R8 + JMP LBB6_127 LCPI7_0: QUAD $0x2626262626262626; QUAD $0x2626262626262626 // .space 16, '&&&&&&&&&&&&&&&&' @@ -2837,12 +3202,12 @@ LCPI7_1: QUAD $0xe2e2e2e2e2e2e2e2; QUAD $0xe2e2e2e2e2e2e2e2 // .space 16, '\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2' LCPI7_2: - QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' - QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' + QUAD $0xfdfdfdfdfdfdfdfd; 
QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' + QUAD $0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' LCPI7_3: - QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' - QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' + QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' + QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' LCPI7_4: QUAD $0x2626262626262626; QUAD $0x2626262626262626 // .space 16, '&&&&&&&&&&&&&&&&' @@ -2851,10 +3216,10 @@ LCPI7_5: QUAD $0xe2e2e2e2e2e2e2e2; QUAD $0xe2e2e2e2e2e2e2e2 // .space 16, '\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2\xe2' LCPI7_6: - QUAD $0x0202020202020202; QUAD $0x0202020202020202 // .space 16, '\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02' + QUAD $0xfdfdfdfdfdfdfdfd; QUAD $0xfdfdfdfdfdfdfdfd // .space 16, '\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd\xfd' LCPI7_7: - QUAD $0x3e3e3e3e3e3e3e3e; QUAD $0x3e3e3e3e3e3e3e3e // .space 16, '>>>>>>>>>>>>>>>>' + QUAD $0x3c3c3c3c3c3c3c3c; QUAD $0x3c3c3c3c3c3c3c3c // .space 16, '<<<<<<<<<<<<<<<<' _html_escape: BYTE $0x55 // pushq %rbp @@ -2871,586 +3236,577 @@ _html_escape: MOVQ DI, -48(BP) MOVQ DI, AX TESTQ SI, SI - JLE LBB7_94 - MOVQ SI, R10 + JLE LBB7_106 MOVQ -64(BP), AX MOVQ 0(AX), R9 - QUAD $0xffffff021d6ffec5 // vmovdqu $-254(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xffffff1a256ffec5 // vmovdqu $-230(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xffffff322d6ffec5 // vmovdqu $-206(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xffffff4a356ffec5 // vmovdqu $-182(%rip), %ymm6 /* LCPI7_3(%rip) */ - LONG $0x18358d4c; WORD $0x00ba; BYTE $0x00 // leaq $47640(%rip), %r14 /* __HtmlQuoteTab(%rip) */ - MOVQ $12884901889, DI - MOVQ -48(BP), R12 + QUAD $0xffffff051d6ffec5 // vmovdqu $-251(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xffffff1d256ffec5 // vmovdqu $-227(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xffffff352d6ffec5 // vmovdqu $-203(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xffffff4d356ffec5 // vmovdqu $-179(%rip), %ymm6 /* LCPI7_3(%rip) */ + LONG $0x45358d4c; WORD $0x00b5; BYTE $0x00 // leaq $46405(%rip), %r14 /* __HtmlQuoteTab(%rip) */ + MOVQ -48(BP), R11 MOVQ -56(BP), R15 LBB7_2: TESTQ R9, R9 - JLE LBB7_96 - CMPQ R10, $31 - SETGT BX + JLE LBB7_3 + CMPQ SI, $32 + SETGE CX MOVQ R9, AX MOVQ R15, R8 - MOVQ R10, SI - MOVQ R12, R13 + MOVQ SI, R10 + MOVQ R11, R12 + JL LBB7_12 CMPQ R9, $32 - JL LBB7_9 - CMPQ R10, $32 - JL LBB7_9 - MOVQ R12, R13 - MOVQ R10, SI - MOVQ R15, R8 - MOVQ R9, DX + JL LBB7_12 + XORL R8, R8 + MOVQ SI, BX + MOVQ R9, DI -LBB7_6: - LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 +LBB7_7: + LONG $0x6f7e81c4; WORD $0x0304 // vmovdqu (%r11,%r8), %ymm0 LONG $0xcb74fdc5 // vpcmpeqb %ymm3, %ymm0, %ymm1 LONG $0xd474fdc5 // vpcmpeqb %ymm4, %ymm0, %ymm2 LONG $0xc9ebedc5 // vpor %ymm1, %ymm2, %ymm1 - LONG $0xd5ebfdc5 // vpor %ymm5, %ymm0, %ymm2 + LONG $0xd5dbfdc5 // vpand %ymm5, %ymm0, %ymm2 LONG $0xd674edc5 // vpcmpeqb %ymm6, %ymm2, %ymm2 LONG $0xcaebf5c5 // vpor %ymm2, %ymm1, %ymm1 - LONG $0x7f7ec1c4; BYTE $0x00 // vmovdqu %ymm0, (%r8) + LONG $0x7f7e81c4; WORD $0x0704 // vmovdqu %ymm0, (%r15,%r8) LONG $0xc1d7fdc5 // vpmovmskb %ymm1, %eax TESTL AX, AX - JNE LBB7_19 - ADDQ $32, R13 + JNE LBB7_8 + LEAQ -32(BX), R10 + LEAQ -32(DI), AX ADDQ $32, R8 - LEAQ -32(DX), AX - CMPQ SI, $63 - SETGT BX - 
CMPQ SI, $64 - LEAQ -32(SI), SI - JL LBB7_9 - CMPQ DX, $63 - MOVQ AX, DX - JG LBB7_6 + CMPQ BX, $64 + SETGE CX + JL LBB7_11 + MOVQ R10, BX + CMPQ DI, $63 + MOVQ AX, DI + JG LBB7_7 -LBB7_9: - TESTB BX, BX - JE LBB7_13 - LONG $0x6f7ec1c4; WORD $0x0045 // vmovdqu (%r13), %ymm0 +LBB7_11: + LEAQ 0(R11)(R8*1), R12 + ADDQ R15, R8 + +LBB7_12: + TESTB CX, CX + JE LBB7_38 + LONG $0x6f7ec1c4; WORD $0x2404 // vmovdqu (%r12), %ymm0 LONG $0xcb74fdc5 // vpcmpeqb %ymm3, %ymm0, %ymm1 LONG $0xd474fdc5 // vpcmpeqb %ymm4, %ymm0, %ymm2 LONG $0xc9ebedc5 // vpor %ymm1, %ymm2, %ymm1 - LONG $0xc5ebfdc5 // vpor %ymm5, %ymm0, %ymm0 + LONG $0xc5dbfdc5 // vpand %ymm5, %ymm0, %ymm0 LONG $0xc674fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm0 LONG $0xc0ebf5c5 // vpor %ymm0, %ymm1, %ymm0 LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx MOVQ $4294967296, DX ORQ DX, CX - BSFQ CX, R11 - LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 + BSFQ CX, R13 + LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 LONG $0x16f9e3c4; WORD $0x01c1 // vpextrq $1, %xmm0, %rcx - LONG $0x7ef9e1c4; BYTE $0xc2 // vmovq %xmm0, %rdx - CMPQ R11, AX - JLE LBB7_20 + LONG $0x7ef9e1c4; BYTE $0xc7 // vmovq %xmm0, %rdi + CMPQ R13, AX + JLE LBB7_14 CMPQ AX, $16 - JB LBB7_23 - MOVQ DX, 0(R8) + JB LBB7_27 + MOVQ DI, 0(R8) MOVQ CX, 8(R8) - LEAQ 16(R13), R11 + LEAQ 16(R12), R10 ADDQ $16, R8 - LEAQ -16(AX), SI - CMPQ SI, $8 - JAE LBB7_24 - JMP LBB7_25 + LEAQ -16(AX), R14 + CMPQ R14, $8 + JAE LBB7_30 + JMP LBB7_31 -LBB7_13: - MOVQ R14, DX +LBB7_38: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ SI, $15 - SETGT R14 + CMPQ R10, $16 + SETGE CX + JL LBB7_39 CMPQ AX, $16 - JL LBB7_30 - CMPQ SI, $16 - QUAD $0xfffffe273d6ffac5 // vmovdqu $-473(%rip), %xmm7 /* LCPI7_4(%rip) */ - QUAD $0xfffffe2f056f7ac5 // vmovdqu $-465(%rip), %xmm8 /* LCPI7_5(%rip) */ - QUAD $0xfffffe370d6f7ac5 // vmovdqu $-457(%rip), %xmm9 /* LCPI7_6(%rip) */ - QUAD $0xfffffe3f156f7ac5 // vmovdqu $-449(%rip), %xmm10 /* LCPI7_7(%rip) */ - JL LBB7_35 - QUAD $0xfffffd811d6ffec5 // vmovdqu $-639(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xfffffd99256ffec5 // vmovdqu $-615(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xfffffdb12d6ffec5 // vmovdqu $-591(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xfffffdc9356ffec5 // vmovdqu $-567(%rip), %ymm6 /* LCPI7_3(%rip) */ - -LBB7_16: - LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 + QUAD $0xfffffe3f3d6ffac5 // vmovdqu $-449(%rip), %xmm7 /* LCPI7_4(%rip) */ + QUAD $0xfffffe47056f7ac5 // vmovdqu $-441(%rip), %xmm8 /* LCPI7_5(%rip) */ + QUAD $0xfffffe4f0d6f7ac5 // vmovdqu $-433(%rip), %xmm9 /* LCPI7_6(%rip) */ + QUAD $0xfffffe57156f7ac5 // vmovdqu $-425(%rip), %xmm10 /* LCPI7_7(%rip) */ + JL LBB7_41 + MOVQ R11, DI + SUBQ R12, DI + QUAD $0xfffffd931d6ffec5 // vmovdqu $-621(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xfffffdab256ffec5 // vmovdqu $-597(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xfffffdc32d6ffec5 // vmovdqu $-573(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xfffffddb356ffec5 // vmovdqu $-549(%rip), %ymm6 /* LCPI7_3(%rip) */ + +LBB7_43: + LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 LONG $0xcf74f9c5 // vpcmpeqb %xmm7, %xmm0, %xmm1 LONG $0xd074b9c5 // vpcmpeqb %xmm0, %xmm8, %xmm2 LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd0ebb1c5 // vpor %xmm0, %xmm9, %xmm2 + LONG $0xd0dbb1c5 // vpand %xmm0, %xmm9, %xmm2 LONG $0xd274a9c5 // vpcmpeqb %xmm2, %xmm10, %xmm2 LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 LONG $0x7f7ac1c4; BYTE $0x00 // vmovdqu %xmm0, (%r8) LONG $0xc9d7f9c5 // vpmovmskb %xmm1, %ecx - TESTW CX, CX - JNE LBB7_22 - ADDQ $16, R13 + TESTL 
CX, CX + JNE LBB7_44 + ADDQ $16, R12 ADDQ $16, R8 - LEAQ -16(AX), R11 - CMPQ SI, $31 - SETGT R14 - CMPQ SI, $32 - LEAQ -16(SI), SI - JL LBB7_31 + LEAQ -16(R10), R14 + LEAQ -16(AX), R13 + CMPQ R10, $32 + SETGE CX + JL LBB7_47 + ADDQ $-16, DI + MOVQ R14, R10 CMPQ AX, $31 - MOVQ R11, AX - JG LBB7_16 - -LBB7_31: - TESTB R14, R14 - JE LBB7_36 - -LBB7_32: - LONG $0x6f7ac1c4; WORD $0x0045 // vmovdqu (%r13), %xmm0 - LONG $0xcf74f9c5 // vpcmpeqb %xmm7, %xmm0, %xmm1 - LONG $0xd074b9c5 // vpcmpeqb %xmm0, %xmm8, %xmm2 - LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd0ebb1c5 // vpor %xmm0, %xmm9, %xmm2 - LONG $0xd274a9c5 // vpcmpeqb %xmm2, %xmm10, %xmm2 - LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 - LONG $0xc1d7f9c5 // vpmovmskb %xmm1, %eax - ORL $65536, AX - BSFL AX, R14 - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - CMPQ R11, R14 - JGE LBB7_55 - CMPQ R11, $8 - JB LBB7_58 - MOVQ AX, 0(R8) - LEAQ 8(R13), AX - ADDQ $8, R8 - LEAQ -8(R11), SI - MOVQ DX, R14 - CMPQ SI, $4 - JAE LBB7_59 - JMP LBB7_60 - -LBB7_19: - SUBQ R12, R13 - BSFL AX, AX - ADDQ R13, AX - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 - -LBB7_20: - CMPL R11, $16 - JB LBB7_43 - MOVQ DX, 0(R8) + MOVQ R13, AX + JG LBB7_43 + JMP LBB7_47 + +LBB7_8: + BSFL AX, R12 + ADDQ R8, R12 + JMP LBB7_83 + +LBB7_14: + CMPL R13, $16 + JB LBB7_15 + MOVQ DI, 0(R8) MOVQ CX, 8(R8) - LEAQ 16(R13), AX + LEAQ 16(R12), R10 ADDQ $16, R8 - LEAQ -16(R11), SI - CMPQ SI, $8 - JAE LBB7_44 - JMP LBB7_45 + LEAQ -16(R13), AX + CMPQ AX, $8 + JAE LBB7_18 + JMP LBB7_19 -LBB7_22: - MOVWLZX CX, AX - SUBQ R12, R13 - BSFL AX, AX - ADDQ R13, AX - MOVQ DX, R14 - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 +LBB7_44: + BSFW CX, AX + MOVWLZX AX, R12 + SUBQ DI, R12 + JMP LBB7_82 -LBB7_23: - MOVQ R13, R11 - MOVQ AX, SI - CMPQ SI, $8 - JB LBB7_25 +LBB7_27: + MOVQ R12, R10 + MOVQ AX, R14 + CMPQ R14, $8 + JB LBB7_31 -LBB7_24: - MOVQ 0(R11), CX +LBB7_30: + MOVQ 0(R10), CX MOVQ CX, 0(R8) - ADDQ $8, R11 + ADDQ $8, R10 ADDQ $8, R8 - ADDQ $-8, SI + ADDQ $-8, R14 -LBB7_25: - CMPQ SI, $4 - JB LBB7_26 - MOVL 0(R11), CX - MOVL CX, 0(R8) - ADDQ $4, R11 - ADDQ $4, R8 - ADDQ $-4, SI - CMPQ SI, $2 - JAE LBB7_52 +LBB7_31: + CMPQ R14, $4 + JAE LBB7_32 + CMPQ R14, $2 + JAE LBB7_34 -LBB7_27: - TESTQ SI, SI - JE LBB7_29 +LBB7_35: + TESTQ R14, R14 + JE LBB7_37 -LBB7_28: - MOVB 0(R11), CX +LBB7_36: + MOVB 0(R10), CX MOVB CX, 0(R8) -LBB7_29: - SUBQ R12, AX - ADDQ R13, AX - NOTQ AX - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 +LBB7_37: + ADDQ R12, AX + NOTQ AX + ADDQ R11, AX + MOVQ AX, R12 + JMP LBB7_82 -LBB7_26: - CMPQ SI, $2 - JB LBB7_27 +LBB7_39: + MOVQ AX, R13 + MOVQ R10, R14 + QUAD $0xfffffc791d6ffec5 // vmovdqu $-903(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xfffffc91256ffec5 // vmovdqu $-879(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xfffffca92d6ffec5 // vmovdqu $-855(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xfffffcc1356ffec5 // vmovdqu $-831(%rip), %ymm6 /* LCPI7_3(%rip) */ + QUAD $0xfffffcd93d6ffac5 // vmovdqu $-807(%rip), %xmm7 /* LCPI7_4(%rip) */ + QUAD $0xfffffce1056f7ac5 // vmovdqu $-799(%rip), %xmm8 /* LCPI7_5(%rip) */ + QUAD $0xfffffce90d6f7ac5 // vmovdqu $-791(%rip), %xmm9 /* LCPI7_6(%rip) */ + QUAD $0xfffffcf1156f7ac5 // vmovdqu $-783(%rip), %xmm10 /* LCPI7_7(%rip) */ -LBB7_52: - MOVWLZX 0(R11), CX - MOVW CX, 0(R8) - ADDQ $2, R11 - ADDQ $2, R8 - ADDQ $-2, SI - TESTQ SI, SI - JNE LBB7_28 - JMP LBB7_29 +LBB7_47: + TESTB CX, CX + JE LBB7_48 -LBB7_30: - MOVQ AX, R11 - QUAD $0xfffffbbb1d6ffec5 // vmovdqu $-1093(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xfffffbd3256ffec5 // vmovdqu 
$-1069(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xfffffbeb2d6ffec5 // vmovdqu $-1045(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xfffffc03356ffec5 // vmovdqu $-1021(%rip), %ymm6 /* LCPI7_3(%rip) */ - QUAD $0xfffffc1b3d6ffac5 // vmovdqu $-997(%rip), %xmm7 /* LCPI7_4(%rip) */ - QUAD $0xfffffc23056f7ac5 // vmovdqu $-989(%rip), %xmm8 /* LCPI7_5(%rip) */ - QUAD $0xfffffc2b0d6f7ac5 // vmovdqu $-981(%rip), %xmm9 /* LCPI7_6(%rip) */ - QUAD $0xfffffc33156f7ac5 // vmovdqu $-973(%rip), %xmm10 /* LCPI7_7(%rip) */ - TESTB R14, R14 - JE LBB7_36 - JMP LBB7_32 +LBB7_58: + LONG $0x6f7ac1c4; WORD $0x2404 // vmovdqu (%r12), %xmm0 + LONG $0xcf74f9c5 // vpcmpeqb %xmm7, %xmm0, %xmm1 + LONG $0xd074b9c5 // vpcmpeqb %xmm0, %xmm8, %xmm2 + LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 + LONG $0xd0dbb1c5 // vpand %xmm0, %xmm9, %xmm2 + LONG $0xd274a9c5 // vpcmpeqb %xmm2, %xmm10, %xmm2 + LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 + LONG $0xc1d7f9c5 // vpmovmskb %xmm1, %eax + ORL $65536, AX + BSFL AX, AX + LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx + CMPQ R13, AX + JGE LBB7_59 + CMPQ R13, $8 + LONG $0x55358d4c; WORD $0x00b2; BYTE $0x00 // leaq $45653(%rip), %r14 /* __HtmlQuoteTab(%rip) */ + JB LBB7_70 + MOVQ CX, 0(R8) + LEAQ 8(R12), AX + ADDQ $8, R8 + LEAQ -8(R13), DI + CMPQ DI, $4 + JAE LBB7_73 + JMP LBB7_74 -LBB7_35: - MOVQ AX, R11 - QUAD $0xfffffb6e1d6ffec5 // vmovdqu $-1170(%rip), %ymm3 /* LCPI7_0(%rip) */ - QUAD $0xfffffb86256ffec5 // vmovdqu $-1146(%rip), %ymm4 /* LCPI7_1(%rip) */ - QUAD $0xfffffb9e2d6ffec5 // vmovdqu $-1122(%rip), %ymm5 /* LCPI7_2(%rip) */ - QUAD $0xfffffbb6356ffec5 // vmovdqu $-1098(%rip), %ymm6 /* LCPI7_3(%rip) */ - TESTB R14, R14 - JNE LBB7_32 +LBB7_41: + MOVQ AX, R13 + MOVQ R10, R14 + QUAD $0xfffffbc31d6ffec5 // vmovdqu $-1085(%rip), %ymm3 /* LCPI7_0(%rip) */ + QUAD $0xfffffbdb256ffec5 // vmovdqu $-1061(%rip), %ymm4 /* LCPI7_1(%rip) */ + QUAD $0xfffffbf32d6ffec5 // vmovdqu $-1037(%rip), %ymm5 /* LCPI7_2(%rip) */ + QUAD $0xfffffc0b356ffec5 // vmovdqu $-1013(%rip), %ymm6 /* LCPI7_3(%rip) */ + TESTB CX, CX + JNE LBB7_58 -LBB7_36: - TESTQ R11, R11 - JLE LBB7_64 - TESTQ SI, SI - MOVQ DX, R14 - JLE LBB7_65 +LBB7_48: + TESTQ R14, R14 + JLE LBB7_56 + TESTQ R13, R13 + JLE LBB7_56 + XORL CX, CX + XORL AX, AX -LBB7_38: - MOVBLZX 0(R13), AX - CMPQ AX, $62 - JA LBB7_40 - MOVQ $5764607797912141824, CX - BTQ AX, CX - JB LBB7_57 +LBB7_51: + MOVBLZX 0(R12)(CX*1), DI + CMPQ DI, $62 + JA LBB7_52 + MOVQ $5764607797912141824, DX + BTQ DI, DX + JB LBB7_80 -LBB7_40: - CMPB AX, $-30 - JE LBB7_57 - INCQ R13 - MOVB AX, 0(R8) - CMPQ SI, $2 - LEAQ -1(SI), SI - JL LBB7_65 - INCQ R8 - CMPQ R11, $1 - LEAQ -1(R11), R11 - JG LBB7_38 - JMP LBB7_65 +LBB7_52: + CMPB DI, $-30 + JE LBB7_80 + LEAQ 0(R14)(AX*1), DX + MOVB DI, 0(R8)(CX*1) + LEAQ -1(AX), DI + CMPQ DX, $2 + JL LBB7_55 + ADDQ R13, AX + ADDQ $1, CX + CMPQ AX, $1 + MOVQ DI, AX + JG LBB7_51 -LBB7_43: +LBB7_55: + SUBQ DI, R12 + ADDQ DI, R14 + +LBB7_56: + TESTQ R14, R14 + JE LBB7_57 + NOTQ R12 + ADDQ R11, R12 + JMP LBB7_82 + +LBB7_15: + MOVQ R12, R10 MOVQ R13, AX - MOVQ R11, SI - CMPQ SI, $8 - JB LBB7_45 + CMPQ AX, $8 + JB LBB7_19 -LBB7_44: - MOVQ 0(AX), CX +LBB7_18: + MOVQ 0(R10), CX MOVQ CX, 0(R8) - ADDQ $8, AX + ADDQ $8, R10 ADDQ $8, R8 - ADDQ $-8, SI + ADDQ $-8, AX -LBB7_45: - CMPQ SI, $4 - JB LBB7_46 - MOVL 0(AX), CX - MOVL CX, 0(R8) - ADDQ $4, AX - ADDQ $4, R8 - ADDQ $-4, SI - CMPQ SI, $2 - JAE LBB7_54 +LBB7_19: + CMPQ AX, $4 + JAE LBB7_20 + CMPQ AX, $2 + JAE LBB7_22 -LBB7_47: - TESTQ SI, SI - JE LBB7_49 +LBB7_23: + TESTQ AX, AX + JE LBB7_25 -LBB7_48: - MOVB 
0(AX), AX +LBB7_24: + MOVB 0(R10), AX MOVB AX, 0(R8) -LBB7_49: - SUBQ R12, R13 - ADDQ R11, R13 - MOVQ R13, AX - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 +LBB7_25: + SUBQ R11, R12 + ADDQ R13, R12 + JMP LBB7_83 -LBB7_46: - CMPQ SI, $2 - JB LBB7_47 +LBB7_32: + MOVL 0(R10), CX + MOVL CX, 0(R8) + ADDQ $4, R10 + ADDQ $4, R8 + ADDQ $-4, R14 + CMPQ R14, $2 + JB LBB7_35 -LBB7_54: - MOVWLZX 0(AX), CX +LBB7_34: + MOVWLZX 0(R10), CX MOVW CX, 0(R8) - ADDQ $2, AX + ADDQ $2, R10 ADDQ $2, R8 - ADDQ $-2, SI - TESTQ SI, SI - JNE LBB7_48 - JMP LBB7_49 + ADDQ $-2, R14 + TESTQ R14, R14 + JNE LBB7_36 + JMP LBB7_37 -LBB7_55: - CMPL R14, $8 - JB LBB7_66 - MOVQ AX, 0(R8) - LEAQ 8(R13), SI - ADDQ $8, R8 - LEAQ -8(R14), AX - CMPQ AX, $4 - JAE LBB7_67 - JMP LBB7_68 +LBB7_20: + MOVL 0(R10), CX + MOVL CX, 0(R8) + ADDQ $4, R10 + ADDQ $4, R8 + ADDQ $-4, AX + CMPQ AX, $2 + JB LBB7_23 -LBB7_57: - SUBQ R12, R13 - MOVQ R13, AX - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 +LBB7_22: + MOVWLZX 0(R10), CX + MOVW CX, 0(R8) + ADDQ $2, R10 + ADDQ $2, R8 + ADDQ $-2, AX + TESTQ AX, AX + JNE LBB7_24 + JMP LBB7_25 -LBB7_58: - MOVQ R13, AX - MOVQ R11, SI - MOVQ DX, R14 - CMPQ SI, $4 +LBB7_59: + CMPL AX, $8 + LONG $0xd4358d4c; WORD $0x00b0; BYTE $0x00 // leaq $45268(%rip), %r14 /* __HtmlQuoteTab(%rip) */ JB LBB7_60 + MOVQ CX, 0(R8) + LEAQ 8(R12), R10 + ADDQ $8, R8 + LEAQ -8(AX), DI + CMPQ DI, $4 + JAE LBB7_63 + JMP LBB7_64 -LBB7_59: +LBB7_80: + SUBQ R11, R12 + SUBQ AX, R12 + JMP LBB7_82 + +LBB7_70: + MOVQ R12, AX + MOVQ R13, DI + CMPQ DI, $4 + JB LBB7_74 + +LBB7_73: MOVL 0(AX), CX MOVL CX, 0(R8) ADDQ $4, AX ADDQ $4, R8 - ADDQ $-4, SI - -LBB7_60: - CMPQ SI, $2 - JB LBB7_61 - MOVWLZX 0(AX), CX - MOVW CX, 0(R8) - ADDQ $2, AX - ADDQ $2, R8 - ADDQ $-2, SI - TESTQ SI, SI - JNE LBB7_62 - JMP LBB7_63 + ADDQ $-4, DI -LBB7_61: - TESTQ SI, SI - JE LBB7_63 +LBB7_74: + CMPQ DI, $2 + JAE LBB7_75 + TESTQ DI, DI + JE LBB7_78 -LBB7_62: +LBB7_77: MOVB 0(AX), AX MOVB AX, 0(R8) -LBB7_63: - SUBQ R12, R11 - ADDQ R13, R11 - NOTQ R11 - MOVQ R11, AX - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 +LBB7_78: + ADDQ R12, R13 + NOTQ R13 + ADDQ R11, R13 + MOVQ R13, R12 + JMP LBB7_83 -LBB7_64: - MOVQ DX, R14 +LBB7_60: + MOVQ R12, R10 + MOVQ AX, DI + CMPQ DI, $4 + JB LBB7_64 -LBB7_65: - SUBQ R12, R13 - NEGQ SI - SBBQ AX, AX - XORQ R13, AX - TESTQ AX, AX - JNS LBB7_72 - JMP LBB7_92 +LBB7_63: + MOVL 0(R10), CX + MOVL CX, 0(R8) + ADDQ $4, R10 + ADDQ $4, R8 + ADDQ $-4, DI -LBB7_66: - MOVQ R13, SI - MOVQ R14, AX - CMPQ AX, $4 - JB LBB7_68 +LBB7_64: + CMPQ DI, $2 + JAE LBB7_65 + TESTQ DI, DI + JE LBB7_68 LBB7_67: - MOVL 0(SI), CX - MOVL CX, 0(R8) - ADDQ $4, SI - ADDQ $4, R8 - ADDQ $-4, AX + MOVB 0(R10), CX + MOVB CX, 0(R8) LBB7_68: - CMPQ AX, $2 - JB LBB7_69 - MOVWLZX 0(SI), CX + SUBQ R11, R12 + ADDQ AX, R12 + JMP LBB7_83 + +LBB7_75: + MOVWLZX 0(AX), CX MOVW CX, 0(R8) - ADDQ $2, SI + ADDQ $2, AX ADDQ $2, R8 - ADDQ $-2, AX - TESTQ AX, AX - JNE LBB7_70 - JMP LBB7_71 + ADDQ $-2, DI + TESTQ DI, DI + JNE LBB7_77 + JMP LBB7_78 -LBB7_69: - TESTQ AX, AX - JE LBB7_71 +LBB7_65: + MOVWLZX 0(R10), CX + MOVW CX, 0(R8) + ADDQ $2, R10 + ADDQ $2, R8 + ADDQ $-2, DI + TESTQ DI, DI + JNE LBB7_67 + JMP LBB7_68 -LBB7_70: - MOVB 0(SI), AX - MOVB AX, 0(R8) +LBB7_57: + SUBQ R11, R12 -LBB7_71: - SUBQ R12, R13 - ADDQ R14, R13 - MOVQ R13, AX - MOVQ DX, R14 - TESTQ AX, AX - JS LBB7_92 +LBB7_82: + LONG $0xfe358d4c; WORD $0x00af; BYTE $0x00 // leaq $45054(%rip), %r14 /* __HtmlQuoteTab(%rip) */ -LBB7_72: - ADDQ AX, R12 - ADDQ AX, R15 - SUBQ AX, R10 - JLE LBB7_93 - SUBQ AX, R9 - MOVB 0(R12), CX - CMPB CX, $-30 - 
JE LBB7_86 - MOVQ R12, AX +LBB7_83: + TESTQ R12, R12 + JS LBB7_84 + ADDQ R12, R11 + ADDQ R12, R15 + SUBQ R12, SI + JLE LBB7_86 + SUBQ R12, R9 + MOVB 0(R11), CX + CMPB CX, $-30 + JE LBB7_89 + MOVQ R11, AX -LBB7_75: - MOVBLZX CX, CX - SHLQ $4, CX - MOVQ 0(CX)(R14*1), DX - MOVLQSX DX, SI - SUBQ SI, R9 - JL LBB7_95 +LBB7_93: + MOVBLZX CX, DI + SHLQ $4, DI + MOVQ 0(DI)(R14*1), DX + MOVLQSX DX, BX + SUBQ BX, R9 + JL LBB7_94 SHLQ $32, DX - LEAQ 8(CX)(R14*1), BX - CMPQ DX, DI - JL LBB7_78 - MOVL 0(BX), DX - MOVL DX, 0(R15) - LEAQ 12(CX)(R14*1), BX - LEAQ 4(R15), DX - LEAQ -4(SI), CX - CMPQ CX, $2 - JGE LBB7_79 - JMP LBB7_80 - -LBB7_78: - MOVQ R15, DX - MOVQ SI, CX - CMPQ CX, $2 - JL LBB7_80 + LEAQ 0(DI)(R14*1), R8 + ADDQ $8, R8 + MOVQ $12884901889, CX + CMPQ DX, CX + JL LBB7_98 + MOVL 0(R8), CX + MOVL CX, 0(R15) + LEAQ 0(DI)(R14*1), R8 + ADDQ $12, R8 + LEAQ 4(R15), R10 + LEAQ -4(BX), DI + CMPQ DI, $2 + JGE LBB7_101 + JMP LBB7_102 -LBB7_79: - MOVQ DI, R8 - MOVWLZX 0(BX), DI - MOVW DI, 0(DX) - MOVQ R8, DI - ADDQ $2, BX - ADDQ $2, DX - ADDQ $-2, CX +LBB7_98: + MOVQ R15, R10 + MOVQ BX, DI + CMPQ DI, $2 + JL LBB7_102 + +LBB7_101: + MOVWLZX 0(R8), DX + MOVW DX, 0(R10) + ADDQ $2, R8 + ADDQ $2, R10 + ADDQ $-2, DI -LBB7_80: - TESTQ CX, CX - JLE LBB7_82 - MOVB 0(BX), CX - MOVB CX, 0(DX) +LBB7_102: + TESTQ DI, DI + JLE LBB7_104 + MOVB 0(R8), CX + MOVB CX, 0(R10) -LBB7_82: - ADDQ SI, R15 +LBB7_104: + ADDQ BX, R15 -LBB7_83: - INCQ AX - MOVQ AX, R12 - CMPQ R10, $1 - LEAQ -1(R10), R10 +LBB7_105: + ADDQ $1, AX + LEAQ -1(SI), CX + MOVQ AX, R11 + CMPQ SI, $1 + MOVQ CX, SI JG LBB7_2 - JMP LBB7_94 - -LBB7_86: - CMPQ R10, $3 - JL LBB7_90 - CMPB 1(R12), $-128 - JNE LBB7_90 - MOVB 2(R12), CX + JMP LBB7_106 + +LBB7_89: + CMPQ SI, $3 + JL LBB7_95 + CMPB 1(R11), $-128 + JNE LBB7_95 + MOVB 2(R11), CX MOVL CX, AX ANDB $-2, AX CMPB AX, $-88 - JNE LBB7_90 - LEAQ 2(R12), AX - ADDQ $-2, R10 - JMP LBB7_75 + JNE LBB7_95 + LEAQ 2(R11), AX + ADDQ $-2, SI + JMP LBB7_93 -LBB7_90: +LBB7_95: TESTQ R9, R9 - JLE LBB7_96 + JLE LBB7_3 MOVB $-30, 0(R15) - INCQ R15 - DECQ R9 - MOVQ R12, AX - JMP LBB7_83 + ADDQ $1, R15 + ADDQ $-1, R9 + MOVQ R11, AX + JMP LBB7_105 -LBB7_92: - SUBQ -56(BP), R15 - NOTQ AX - ADDQ AX, R15 - MOVQ -64(BP), CX - MOVQ R15, 0(CX) - SUBQ -48(BP), R12 - ADDQ AX, R12 - NOTQ R12 - JMP LBB7_97 +LBB7_84: + MOVQ -56(BP), CX + ADDQ R12, CX + NOTQ CX + ADDQ R15, CX + MOVQ -64(BP), AX + MOVQ CX, 0(AX) + MOVQ -48(BP), AX + SUBQ R11, AX + ADDQ R12, AX + JMP LBB7_107 -LBB7_93: - MOVQ R12, AX +LBB7_86: + MOVQ R11, AX -LBB7_94: +LBB7_106: SUBQ -56(BP), R15 MOVQ -64(BP), CX MOVQ R15, 0(CX) SUBQ -48(BP), AX - JMP LBB7_98 + JMP LBB7_107 -LBB7_95: +LBB7_94: SUBQ -56(BP), R15 MOVQ -64(BP), AX MOVQ R15, 0(AX) -LBB7_96: - NOTQ R12 - ADDQ -48(BP), R12 - -LBB7_97: - MOVQ R12, AX +LBB7_3: + NOTQ R11 + ADDQ -48(BP), R11 + MOVQ R11, AX -LBB7_98: +LBB7_107: ADDQ $24, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 @@ -3486,73 +3842,74 @@ LBB8_5: SHLQ CX, DI MOVL AX, CX SHLQ $4, CX - LONG $0x373d8d4c; WORD $0x003b; BYTE $0x00 // leaq $15159(%rip), %r15 /* _POW10_M128_TAB(%rip) */ + LONG $0x853d8d4c; WORD $0x0035; BYTE $0x00 // leaq $13701(%rip), %r15 /* _POW10_M128_TAB(%rip) */ MOVQ DI, AX MULQ 8(CX)(R15*1) MOVQ AX, R11 MOVQ DX, R14 ANDL $511, DX + CMPQ DX, $511 + JNE LBB8_11 MOVQ DI, BX NOTQ BX - CMPQ AX, BX + CMPQ R11, BX JBE LBB8_11 - CMPL DX, $511 - JNE LBB8_11 MOVQ DI, AX MULQ 0(CX)(R15*1) ADDQ DX, R11 ADCQ $0, R14 MOVL R14, DX ANDL $511, DX - CMPQ AX, BX - JBE LBB8_11 + CMPQ DX, $511 + JNE LBB8_11 CMPQ R11, $-1 JNE LBB8_11 - CMPL DX, 
$511 - JE LBB8_1 + CMPQ AX, BX + JA LBB8_1 LBB8_11: - MOVQ R14, DI - SHRQ $63, DI - LEAL 9(DI), CX - SHRQ CX, R14 - ORQ R11, DX - JNE LBB8_14 - MOVL R14, AX - ANDL $3, AX - CMPL AX, $1 - JE LBB8_1 - -LBB8_14: - LONG $0x526ac669; WORD $0x0003 // imull $217706, %esi, %eax - SARL $16, AX - ADDL $1087, AX - WORD $0x9848 // cltq - SUBQ R10, AX - XORQ $1, DI - SUBQ DI, AX - MOVL R14, DX - ANDL $1, DX - ADDQ R14, DX - MOVQ $126100789566373888, CX - ANDQ DX, CX - CMPQ CX, $1 - SBBQ $-1, AX - LEAQ -1(AX), SI - CMPQ SI, $2045 - JBE LBB8_16 + MOVQ R14, AX + SHRQ $63, AX + LEAL 9(AX), CX + SHRQ CX, R14 + TESTQ R11, R11 + JNE LBB8_15 + TESTQ DX, DX + JNE LBB8_15 + MOVL R14, CX + ANDL $3, CX + CMPL CX, $1 + JE LBB8_1 + +LBB8_15: + LONG $0x526ace69; WORD $0x0003 // imull $217706, %esi, %ecx + SARL $16, CX + ADDL $1087, CX + MOVLQSX CX, SI + SUBQ R10, SI + MOVL R14, DX + ANDL $1, DX + ADDQ R14, DX + MOVQ DX, CX + SHRQ $54, CX + ADDQ SI, AX + CMPQ CX, $1 + SBBQ $0, AX + LEAQ -1(AX), SI + CMPQ SI, $2045 + JBE LBB8_17 LBB8_1: XORL AX, AX -LBB8_17: +LBB8_18: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET -LBB8_16: +LBB8_17: CMPQ CX, $1 MOVB $2, CX SBBB $0, CX @@ -3567,7 +3924,7 @@ LBB8_16: LONG $0xc1450f48 // cmovneq %rcx, %rax MOVQ AX, 0(R8) MOVB $1, AX - JMP LBB8_17 + JMP LBB8_18 LCPI9_0: QUAD $0x0000000000000000 // .space 8, '\x00\x00\x00\x00\x00\x00\x00\x00' @@ -3593,284 +3950,216 @@ _decimal_to_f64: WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx BYTE $0x50 // pushq %rax - MOVQ SI, R13 - MOVQ DI, R12 - MOVQ $4503599627370496, BX + MOVQ SI, BX + MOVQ DI, R15 + MOVQ $4503599627370496, R13 CMPL 16(DI), $0 JE LBB9_4 MOVQ $9218868437227405312, R14 - MOVL 20(R12), AX - XORL R15, R15 + MOVL 20(R15), AX + XORL R12, R12 CMPL AX, $310 - JG LBB9_84 + JG LBB9_69 CMPL AX, $-330 JGE LBB9_5 XORL R14, R14 - JMP LBB9_84 + JMP LBB9_69 LBB9_4: XORL R14, R14 - XORL R15, R15 - JMP LBB9_84 + XORL R12, R12 + JMP LBB9_69 LBB9_5: TESTL AX, AX - MOVQ R13, -48(BP) - JLE LBB9_20 - XORL R15, R15 - LONG $0x262d8d4c; WORD $0x0065; BYTE $0x00 // leaq $25894(%rip), %r13 /* _POW_TAB(%rip) */ - JMP LBB9_9 - -LBB9_7: - NEGL BX - MOVQ R12, DI - MOVL BX, SI - LONG $0x003843e8; BYTE $0x00 // callq _right_shift - -LBB9_8: - ADDL R14, R15 - MOVL 20(R12), AX - TESTL AX, AX - JLE LBB9_20 + MOVQ BX, -48(BP) + JLE LBB9_12 + XORL R12, R12 + LONG $0x7a358d4c; WORD $0x005f; BYTE $0x00 // leaq $24442(%rip), %r14 /* _POW_TAB(%rip) */ + JMP LBB9_8 -LBB9_9: - MOVL $27, R14 - CMPL AX, $8 - JG LBB9_11 +LBB9_10: MOVL AX, AX - MOVL 0(R13)(AX*4), R14 + MOVL 0(R14)(AX*4), BX + CMPL 16(R15), $0 + JE LBB9_7 LBB9_11: - TESTL R14, R14 - JE LBB9_8 - CMPL 16(R12), $0 - JE LBB9_8 - MOVL R14, BX - NEGL BX - TESTL R14, R14 - JS LBB9_16 - CMPL R14, $61 - JL LBB9_7 + MOVQ R15, DI + MOVL BX, SI + LONG $0x003267e8; BYTE $0x00 // callq _right_shift -LBB9_15: - MOVQ R12, DI - MOVL $60, SI - LONG $0x0037fbe8; BYTE $0x00 // callq _right_shift - LEAL 60(BX), AX - CMPL BX, $-120 - MOVL AX, BX - JL LBB9_15 - JMP LBB9_7 +LBB9_7: + ADDL BX, R12 + MOVL 20(R15), AX + TESTL AX, AX + JLE LBB9_12 -LBB9_16: - CMPL R14, $-61 - JG LBB9_18 +LBB9_8: + CMPL AX, $8 + JLE LBB9_10 + MOVL $27, BX + CMPL 16(R15), $0 + JNE LBB9_11 + JMP LBB9_7 -LBB9_17: - MOVQ R12, DI - MOVL $60, SI - LONG $0x00366de8; BYTE $0x00 // callq _left_shift - LEAL -60(BX), SI - CMPL BX, $120 - MOVL SI, BX - JG LBB9_17 - JMP LBB9_19 +LBB9_12: + LONG $0x3c358d4c; WORD $0x005f; BYTE $0x00 // leaq $24380(%rip), %r14 /* _POW_TAB(%rip) */ + JMP LBB9_14 LBB9_18: - MOVL 
BX, SI - -LBB9_19: - MOVQ R12, DI - LONG $0x003657e8; BYTE $0x00 // callq _left_shift - JMP LBB9_8 + MOVL $27, BX + CMPL 16(R15), $0 + JE LBB9_13 LBB9_20: - LONG $0x92358d4c; WORD $0x0064; BYTE $0x00 // leaq $25746(%rip), %r14 /* _POW_TAB(%rip) */ - JMP LBB9_23 - -LBB9_21: - MOVQ R12, DI - LONG $0x003644e8; BYTE $0x00 // callq _left_shift + MOVQ R15, DI + MOVL BX, SI + LONG $0x00308be8; BYTE $0x00 // callq _left_shift + MOVL 20(R15), AX -LBB9_22: - SUBL R13, R15 - MOVL 20(R12), AX +LBB9_13: + SUBL BX, R12 -LBB9_23: +LBB9_14: TESTL AX, AX - JS LBB9_26 - JNE LBB9_37 - MOVQ 0(R12), CX + JS LBB9_17 + JNE LBB9_21 + MOVQ 0(R15), CX CMPB 0(CX), $53 - JL LBB9_27 - JMP LBB9_37 + JL LBB9_19 + JMP LBB9_21 -LBB9_26: - MOVL $27, R13 +LBB9_17: CMPL AX, $-8 - JL LBB9_28 - -LBB9_27: - NEGL AX - WORD $0x9848 // cltq - MOVL 0(R14)(AX*4), R13 - -LBB9_28: - TESTL R13, R13 - JE LBB9_22 - CMPL 16(R12), $0 - JE LBB9_22 - TESTL R13, R13 - JLE LBB9_34 - MOVL R13, SI - CMPL R13, $61 - JL LBB9_21 - MOVL R13, BX - -LBB9_33: - MOVQ R12, DI - MOVL $60, SI - LONG $0x0035e9e8; BYTE $0x00 // callq _left_shift - LEAL -60(BX), SI - CMPL BX, $120 - MOVL SI, BX - JG LBB9_33 - JMP LBB9_21 + JL LBB9_18 -LBB9_34: - MOVL R13, BX - CMPL R13, $-61 - JG LBB9_36 - -LBB9_35: - MOVQ R12, DI - MOVL $60, SI - LONG $0x003736e8; BYTE $0x00 // callq _right_shift - LEAL 60(BX), AX - CMPL BX, $-120 - MOVL AX, BX - JL LBB9_35 - -LBB9_36: - NEGL BX - MOVQ R12, DI - MOVL BX, SI - LONG $0x003720e8; BYTE $0x00 // callq _right_shift - JMP LBB9_22 +LBB9_19: + MOVL AX, CX + NEGL CX + MOVL 0(R14)(CX*4), BX + CMPL 16(R15), $0 + JNE LBB9_20 + JMP LBB9_13 -LBB9_37: - CMPL R15, $-1022 - JG LBB9_43 - CMPL 16(R12), $0 - MOVQ -48(BP), R13 - MOVQ $4503599627370496, BX - JE LBB9_45 - CMPL R15, $-1082 - JG LBB9_46 - ADDL $961, R15 +LBB9_21: + CMPL R12, $-1022 + JG LBB9_27 + CMPL 16(R15), $0 + MOVQ -48(BP), BX + JE LBB9_29 + CMPL R12, $-1082 + JG LBB9_30 + ADDL $961, R12 -LBB9_41: - MOVQ R12, DI +LBB9_25: + MOVQ R15, DI MOVL $60, SI - LONG $0x0036dfe8; BYTE $0x00 // callq _right_shift - ADDL $60, R15 - CMPL R15, $-120 - JL LBB9_41 - ADDL $60, R15 - JMP LBB9_47 + LONG $0x0031cce8; BYTE $0x00 // callq _right_shift + ADDL $60, R12 + CMPL R12, $-120 + JL LBB9_25 + ADDL $60, R12 + JMP LBB9_31 -LBB9_43: - CMPL R15, $1024 - MOVQ -48(BP), R13 - MOVQ $4503599627370496, BX - JG LBB9_81 - DECL R15 - MOVL R15, R14 - JMP LBB9_48 +LBB9_27: + CMPL R12, $1024 + MOVQ -48(BP), BX + JG LBB9_66 + ADDL $-1, R12 + MOVL R12, R14 + JMP LBB9_32 -LBB9_45: +LBB9_29: MOVL $-1022, R14 - JMP LBB9_50 + JMP LBB9_34 -LBB9_46: - ADDL $1021, R15 +LBB9_30: + ADDL $1021, R12 -LBB9_47: - NEGL R15 - MOVQ R12, DI - MOVL R15, SI - LONG $0x00368fe8; BYTE $0x00 // callq _right_shift +LBB9_31: + NEGL R12 + MOVQ R15, DI + MOVL R12, SI + LONG $0x003185e8; BYTE $0x00 // callq _right_shift MOVL $-1022, R14 -LBB9_48: - CMPL 16(R12), $0 - JE LBB9_50 - MOVQ R12, DI +LBB9_32: + CMPL 16(R15), $0 + JE LBB9_34 + MOVQ R15, DI MOVL $53, SI - LONG $0x003505e8; BYTE $0x00 // callq _left_shift + LONG $0x002fcce8; BYTE $0x00 // callq _left_shift -LBB9_50: - MOVLQSX 20(R12), R10 - MOVQ $-1, R15 - CMPQ R10, $20 - JG LBB9_83 - MOVL R10, CX - TESTL R10, R10 - JLE LBB9_55 - MOVLQSX 16(R12), SI - XORL DX, DX - XORL R15, R15 +LBB9_34: + MOVL 20(R15), AX + MOVQ $-1, R12 + CMPL AX, $20 + JG LBB9_68 + TESTL AX, AX + JLE LBB9_40 + MOVL 16(R15), DX + XORL SI, SI + TESTL DX, DX + WORD $0x4e0f; BYTE $0xd6 // cmovlel %esi, %edx + LEAQ -1(AX), R9 + CMPQ R9, DX + LONG $0xca430f4c // cmovaeq %rdx, %r9 + LEAL 1(R9), R8 + XORL R12, R12 
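// The hunk below is the digit-accumulation loop of _decimal_to_f64 as
// compiled from native/parsing.c. As a rough C sketch of the idiom it
// implements (function and variable names here are illustrative, not
// the actual source):
//
//     #include <stdint.h>
//     #include <stddef.h>
//
//     static uint64_t accumulate_digits(const char *s, size_t n) {
//         uint64_t acc = 0;
//         for (size_t i = 0; i < n; i++)
//             acc = acc * 10 + (uint64_t)(s[i] - '0');  /* base-10 accumulate */
//         return acc;
//     }
//
// clang lowers the multiply-by-10 to the two LEAs visible in the loop
// body (acc*5 via LEAQ (R12)(R12*4), then *2 via the scaled index) and
// folds the '0' bias into ADDQ $-48.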
-LBB9_53: +LBB9_37: CMPQ DX, SI - JGE LBB9_56 - LEAQ 0(R15)(R15*4), AX - MOVQ 0(R12), DI - MOVBQSX 0(DI)(DX*1), DI - LEAQ -48(DI)(AX*2), R15 - INCQ DX - CMPQ CX, DX - JNE LBB9_53 - JMP LBB9_56 - -LBB9_55: - XORL DX, DX - XORL R15, R15 + JE LBB9_41 + LEAQ 0(R12)(R12*4), DI + MOVQ 0(R15), CX + MOVBQSX 0(CX)(SI*1), CX + LEAQ 0(CX)(DI*2), R12 + ADDQ $-48, R12 + ADDQ $1, SI + CMPQ AX, SI + JNE LBB9_37 + MOVL R8, R9 + JMP LBB9_41 + +LBB9_40: + XORL R9, R9 + XORL R12, R12 -LBB9_56: - CMPL CX, DX - JLE LBB9_69 - MOVL R10, R9 - SUBL DX, R9 - CMPL R9, $16 - JB LBB9_67 - MOVL R9, R8 - QUAD $0xfffffd09056ffac5 // vmovdqu $-759(%rip), %xmm0 /* LCPI9_0(%rip) */ - LONG $0x22f9c3c4; WORD $0x00c7 // vpinsrq $0, %r15, %xmm0, %xmm0 +LBB9_41: + CMPL AX, R9 + JLE LBB9_54 + MOVL AX, DX + SUBL R9, DX + CMPL DX, $16 + JB LBB9_52 + MOVL DX, R8 + QUAD $0xfffffdc3056ffac5 // vmovdqu $-573(%rip), %xmm0 /* LCPI9_0(%rip) */ + LONG $0x22f9c3c4; WORD $0x00c4 // vpinsrq $0, %r12, %xmm0, %xmm0 ANDL $-16, R8 - QUAD $0xfffcf505027de3c4; WORD $0xf0ff // vpblendd $240, $-779(%rip), %ymm0, %ymm0 /* LCPI9_0(%rip) */ - LEAL -16(R8), DI - MOVL DI, AX - SHRL $4, AX - INCL AX - MOVL AX, SI + QUAD $0xfffdaf05027de3c4; WORD $0xf0ff // vpblendd $240, $-593(%rip), %ymm0, %ymm0 /* LCPI9_0(%rip) */ + LEAL -16(R8), CX + MOVL CX, DI + SHRL $4, DI + ADDL $1, DI + MOVL DI, SI ANDL $3, SI - CMPL DI, $48 - JAE LBB9_60 - QUAD $0xfffcf715597de2c4; BYTE $0xff // vpbroadcastq $-777(%rip), %ymm2 /* LCPI9_1(%rip) */ + CMPL CX, $48 + JAE LBB9_45 + QUAD $0xfffdb015597de2c4; BYTE $0xff // vpbroadcastq $-592(%rip), %ymm2 /* LCPI9_1(%rip) */ LONG $0xda6ffdc5 // vmovdqa %ymm2, %ymm3 LONG $0xca6ffdc5 // vmovdqa %ymm2, %ymm1 - JMP LBB9_62 + JMP LBB9_47 -LBB9_60: - MOVL SI, DI - SUBL AX, DI - QUAD $0xfffce015597de2c4; BYTE $0xff // vpbroadcastq $-800(%rip), %ymm2 /* LCPI9_1(%rip) */ - QUAD $0xfffcdf25597de2c4; BYTE $0xff // vpbroadcastq $-801(%rip), %ymm4 /* LCPI9_2(%rip) */ +LBB9_45: + ANDL $-4, DI + NEGL DI + QUAD $0xfffd9815597de2c4; BYTE $0xff // vpbroadcastq $-616(%rip), %ymm2 /* LCPI9_1(%rip) */ + QUAD $0xfffd9725597de2c4; BYTE $0xff // vpbroadcastq $-617(%rip), %ymm4 /* LCPI9_2(%rip) */ LONG $0xda6ffdc5 // vmovdqa %ymm2, %ymm3 LONG $0xca6ffdc5 // vmovdqa %ymm2, %ymm1 -LBB9_61: +LBB9_46: LONG $0xecf4fdc5 // vpmuludq %ymm4, %ymm0, %ymm5 LONG $0xd073fdc5; BYTE $0x20 // vpsrlq $32, %ymm0, %ymm0 LONG $0xc4f4fdc5 // vpmuludq %ymm4, %ymm0, %ymm0 @@ -3892,15 +4181,15 @@ LBB9_61: LONG $0xf173f5c5; BYTE $0x20 // vpsllq $32, %ymm1, %ymm1 LONG $0xc9d4d5c5 // vpaddq %ymm1, %ymm5, %ymm1 ADDL $4, DI - JNE LBB9_61 + JNE LBB9_46 -LBB9_62: +LBB9_47: TESTL SI, SI - JE LBB9_65 + JE LBB9_50 NEGL SI - QUAD $0xfffc7325597de2c4; BYTE $0xff // vpbroadcastq $-909(%rip), %ymm4 /* LCPI9_3(%rip) */ + QUAD $0xfffd2b25597de2c4; BYTE $0xff // vpbroadcastq $-725(%rip), %ymm4 /* LCPI9_3(%rip) */ -LBB9_64: +LBB9_49: LONG $0xecf4fdc5 // vpmuludq %ymm4, %ymm0, %ymm5 LONG $0xd073fdc5; BYTE $0x20 // vpsrlq $32, %ymm0, %ymm0 LONG $0xc4f4fdc5 // vpmuludq %ymm4, %ymm0, %ymm0 @@ -3922,9 +4211,9 @@ LBB9_64: LONG $0xf173f5c5; BYTE $0x20 // vpsllq $32, %ymm1, %ymm1 LONG $0xc9d4d5c5 // vpaddq %ymm1, %ymm5, %ymm1 INCL SI - JNE LBB9_64 + JNE LBB9_49 -LBB9_65: +LBB9_50: LONG $0xd273ddc5; BYTE $0x20 // vpsrlq $32, %ymm2, %ymm4 LONG $0xe0f4ddc5 // vpmuludq %ymm0, %ymm4, %ymm4 LONG $0xd073d5c5; BYTE $0x20 // vpsrlq $32, %ymm0, %ymm5 @@ -3958,91 +4247,92 @@ LBB9_65: LONG $0xf273e9c5; BYTE $0x20 // vpsllq $32, %xmm2, %xmm2 LONG $0xc1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm0 LONG $0xc2d4f9c5 // vpaddq %xmm2, 
%xmm0, %xmm0 - LONG $0xc870f9c5; BYTE $0x4e // vpshufd $78, %xmm0, %xmm1 + LONG $0xc870f9c5; BYTE $0xee // vpshufd $238, %xmm0, %xmm1 LONG $0xd073e9c5; BYTE $0x20 // vpsrlq $32, %xmm0, %xmm2 LONG $0xd1f4e9c5 // vpmuludq %xmm1, %xmm2, %xmm2 - LONG $0xd873e1c5; BYTE $0x0c // vpsrldq $12, %xmm0, %xmm3 + LONG $0xd870f9c5; BYTE $0xff // vpshufd $255, %xmm0, %xmm3 LONG $0xdbf4f9c5 // vpmuludq %xmm3, %xmm0, %xmm3 LONG $0xd2d4e1c5 // vpaddq %xmm2, %xmm3, %xmm2 LONG $0xf273e9c5; BYTE $0x20 // vpsllq $32, %xmm2, %xmm2 LONG $0xc1f4f9c5 // vpmuludq %xmm1, %xmm0, %xmm0 LONG $0xc2d4f9c5 // vpaddq %xmm2, %xmm0, %xmm0 - LONG $0x7ef9c1c4; BYTE $0xc7 // vmovq %xmm0, %r15 - CMPL R9, R8 - JE LBB9_69 - ADDL R8, DX + LONG $0x7ef9c1c4; BYTE $0xc4 // vmovq %xmm0, %r12 + CMPL DX, R8 + JE LBB9_54 + ADDL R8, R9 -LBB9_67: - MOVL CX, SI - SUBL DX, SI +LBB9_52: + MOVL AX, DX + SUBL R9, DX -LBB9_68: - ADDQ R15, R15 - LEAQ 0(R15)(R15*4), R15 - DECL SI - JNE LBB9_68 +LBB9_53: + ADDQ R12, R12 + LEAQ 0(R12)(R12*4), R12 + ADDL $-1, DX + JNE LBB9_53 -LBB9_69: - TESTL CX, CX - JS LBB9_77 - MOVL 16(R12), AX - CMPL AX, CX - JLE LBB9_77 - MOVQ 0(R12), SI - MOVB 0(SI)(CX*1), DX - LEAL 1(R10), DI - CMPL DI, AX - JNE LBB9_78 +LBB9_54: + TESTL AX, AX + JS LBB9_62 + MOVL 16(R15), CX + CMPL CX, AX + JLE LBB9_62 + MOVQ 0(R15), SI + MOVB 0(SI)(AX*1), DX CMPB DX, $53 - JNE LBB9_78 - CMPL 28(R12), $0 - SETNE DX - JNE LBB9_79 - TESTL CX, CX - JLE LBB9_79 - MOVB -1(R10)(SI*1), DX - ANDB $1, DX - JMP LBB9_79 + JNE LBB9_63 + LEAL 1(AX), DI + CMPL DI, CX + JNE LBB9_63 + CMPL 28(R15), $0 + SETNE CX + JNE LBB9_64 + TESTL AX, AX + JLE LBB9_64 + ADDL $-1, AX + MOVB 0(SI)(AX*1), CX + ANDB $1, CX + JMP LBB9_64 -LBB9_77: - XORL DX, DX +LBB9_62: + XORL CX, CX -LBB9_79: - MOVBLZX DX, AX - ADDQ AX, R15 +LBB9_64: + MOVBLZX CX, AX + ADDQ AX, R12 MOVQ $9007199254740992, AX - CMPQ R15, AX - JNE LBB9_83 + CMPQ R12, AX + JNE LBB9_68 CMPL R14, $1022 - JLE LBB9_82 + JLE LBB9_67 -LBB9_81: - XORL R15, R15 +LBB9_66: + XORL R12, R12 MOVQ $9218868437227405312, R14 - JMP LBB9_84 + JMP LBB9_69 -LBB9_82: - INCL R14 - MOVQ BX, R15 +LBB9_67: + ADDL $1, R14 + MOVQ R13, R12 -LBB9_83: - MOVQ R15, AX - ANDQ BX, AX +LBB9_68: + MOVQ R12, AX + ANDQ R13, AX ADDL $1023, R14 ANDL $2047, R14 SHLQ $52, R14 TESTQ AX, AX LONG $0xf0440f4c // cmoveq %rax, %r14 -LBB9_84: - DECQ BX - ANDQ R15, BX - ORQ R14, BX +LBB9_69: + ADDQ $-1, R13 + ANDQ R12, R13 + ORQ R14, R13 MOVQ $-9223372036854775808, AX - ORQ BX, AX - CMPL 24(R12), $0 - LONG $0xc3440f48 // cmoveq %rbx, %rax - MOVQ AX, 0(R13) + ORQ R13, AX + CMPL 24(R15), $0 + LONG $0xc5440f49 // cmoveq %r13, %rax + MOVQ AX, 0(BX) XORL AX, AX ADDQ $8, SP BYTE $0x5b // popq %rbx @@ -4054,10 +4344,10 @@ LBB9_84: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB9_78: - CMPB DX, $52 - SETGT DX - JMP LBB9_79 +LBB9_63: + CMPB DX, $53 + SETGE CX + JMP LBB9_64 _atof_native: BYTE $0x55 // pushq %rbp @@ -4079,503 +4369,263 @@ _atof_native: LBB10_4: MOVQ -32(BP), CX MOVB $0, 0(CX)(AX*1) - INCQ AX + ADDQ $1, AX CMPQ -24(BP), AX JA LBB10_4 LBB10_5: LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4511f8c5; BYTE $0xf0 // vmovups %xmm0, $-16(%rbp) + XORL DX, DX CMPB 0(DI), $45 - JNE LBB10_6 + JNE LBB10_7 MOVL $1, -8(BP) MOVL $1, AX CMPQ AX, SI - JL LBB10_9 - -LBB10_41: - MOVL $0, -12(BP) - JMP LBB10_40 + JL LBB10_8 + JMP LBB10_39 -LBB10_6: +LBB10_7: XORL AX, AX CMPQ AX, SI - JGE LBB10_41 + JGE LBB10_39 -LBB10_9: +LBB10_8: MOVB $1, R11 XORL R9, R9 XORL R10, R10 XORL R8, R8 - JMP LBB10_10 + JMP LBB10_12 -LBB10_13: - DECL -12(BP) - XORL R10, R10 +LBB10_20: + 
MOVL $1, -4(BP) -LBB10_22: - INCQ AX +LBB10_11: + ADDQ $1, AX CMPQ AX, SI SETLT R11 CMPQ SI, AX - JE LBB10_23 + JE LBB10_22 -LBB10_10: +LBB10_12: MOVBLZX 0(DI)(AX*1), CX LEAL -48(CX), DX CMPB DX, $9 - JA LBB10_19 - TESTL R10, R10 - JNE LBB10_14 + JA LBB10_17 CMPB CX, $48 - JE LBB10_13 - -LBB10_14: - MOVLQSX R9, R10 - CMPQ -24(BP), R10 - JBE LBB10_16 - MOVQ -32(BP), DX - MOVB CX, 0(DX)(R10*1) - MOVL -16(BP), R9 - INCL R9 - MOVL R9, -16(BP) - MOVL R9, R10 - JMP LBB10_22 + JNE LBB10_19 + TESTL R10, R10 + JE LBB10_21 + MOVLQSX R9, R11 + CMPQ -24(BP), R11 + JA LBB10_9 + JMP LBB10_10 -LBB10_19: +LBB10_17: CMPB CX, $46 - JNE LBB10_20 + JNE LBB10_23 MOVL R10, -12(BP) MOVL $1, R8 - JMP LBB10_22 - -LBB10_16: - CMPB CX, $48 - JNE LBB10_18 - MOVL R9, R10 - JMP LBB10_22 - -LBB10_18: - MOVL $1, -4(BP) - MOVL R9, R10 - JMP LBB10_22 - -LBB10_23: - MOVL SI, CX - MOVQ SI, AX - TESTL R8, R8 - JNE LBB10_26 - -LBB10_25: - MOVL R9, -12(BP) - -LBB10_26: - TESTB $1, R11 - JE LBB10_40 - MOVB 0(DI)(CX*1), CX - ORB $32, CX - CMPB CX, $101 - JNE LBB10_40 - MOVL AX, DX - MOVB 1(DI)(DX*1), CX - CMPB CX, $45 - JE LBB10_32 - MOVL $1, R8 - CMPB CX, $43 - JNE LBB10_30 - ADDL $2, AX - JMP LBB10_33 - -LBB10_20: - MOVQ AX, CX - TESTL R8, R8 - JNE LBB10_26 - JMP LBB10_25 - -LBB10_32: - ADDL $2, AX - MOVL $-1, R8 - -LBB10_33: - MOVL AX, DX - MOVLQSX DX, DX - XORL R9, R9 - CMPQ DX, SI - JL LBB10_35 - JMP LBB10_39 - -LBB10_30: - INCQ DX - MOVLQSX DX, DX - XORL R9, R9 - CMPQ DX, SI - JGE LBB10_39 - -LBB10_35: - XORL R9, R9 - -LBB10_36: - CMPL R9, $9999 - JG LBB10_39 - MOVBLZX 0(DI)(DX*1), CX - LEAL -48(CX), AX - CMPB AX, $9 - JA LBB10_39 - LEAL 0(R9)(R9*4), AX - LEAL -48(CX)(AX*2), R9 - INCQ DX - CMPQ SI, DX - JNE LBB10_36 - -LBB10_39: - IMULL R8, R9 - ADDL R9, -12(BP) - -LBB10_40: - LEAQ -32(BP), DI - LEAQ -40(BP), SI - LONG $0xfff8cbe8; BYTE $0xff // callq _decimal_to_f64 - LONG $0x4510fbc5; BYTE $0xd8 // vmovsd $-40(%rbp), %xmm0 - ADDQ $48, SP - BYTE $0x5d // popq %rbp - RET - -LCPI11_0: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - -LCPI11_1: - QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' - QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' - -LCPI11_2: - QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' - QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' - -LCPI11_3: - QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' - QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' - -LCPI11_4: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - -LCPI11_5: - QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' - -LCPI11_6: - QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' - -LCPI11_7: - QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' - -_value: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - WORD $0x5741 // pushq %r15 - WORD $0x5641 // pushq %r14 - WORD $0x5541 // pushq %r13 - WORD $0x5441 // pushq %r12 - BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVQ CX, R10 - MOVQ DX, R12 - MOVQ SI, R13 - MOVQ DI, R15 - MOVQ DX, -48(BP) - MOVQ DI, -80(BP) - MOVQ 
SI, -72(BP) - CMPQ DX, SI - JAE LBB11_5 - MOVB 0(R15)(R12*1), AX - CMPB AX, $13 - JE LBB11_5 - CMPB AX, $32 - JE LBB11_5 - LEAL -9(AX), CX - CMPB CX, $1 - JBE LBB11_5 - MOVQ R12, BX - JMP LBB11_48 + JMP LBB10_11 -LBB11_5: - LEAQ 1(R12), BX - CMPQ BX, R13 - JAE LBB11_9 - MOVB 0(R15)(BX*1), AX - CMPB AX, $13 - JE LBB11_9 - CMPB AX, $32 - JE LBB11_9 - LEAL -9(AX), CX - CMPB CX, $1 - JA LBB11_48 - -LBB11_9: - LEAQ 2(R12), BX - CMPQ BX, R13 - JAE LBB11_13 - MOVB 0(R15)(BX*1), AX - CMPB AX, $13 - JE LBB11_13 - CMPB AX, $32 - JE LBB11_13 - LEAL -9(AX), CX - CMPB CX, $1 - JA LBB11_48 - -LBB11_13: - LEAQ 3(R12), BX - CMPQ BX, R13 - JAE LBB11_17 - MOVB 0(R15)(BX*1), AX - CMPB AX, $13 - JE LBB11_17 - CMPB AX, $32 - JE LBB11_17 - LEAL -9(AX), CX - CMPB CX, $1 - JA LBB11_48 - -LBB11_17: - LEAQ 4(R12), CX - CMPQ CX, R13 - JAE LBB11_43 - LEAQ 0(R15)(CX*1), BX - MOVQ R13, AX - SUBQ CX, AX - JE LBB11_26 - MOVL BX, CX - ANDL $31, CX - TESTQ CX, CX - JE LBB11_26 - LEAQ 0(R15)(R12*1), CX - MOVQ R13, AX - SUBQ R12, AX - LEAQ -5(AX), DX - XORL DI, DI - MOVQ $4294977024, R9 - -LBB11_21: - MOVBLSX 4(CX)(DI*1), SI - CMPL SI, $32 - JA LBB11_45 - BTQ SI, R9 - JAE LBB11_45 - LEAQ 1(DI), SI - CMPQ DX, DI - JE LBB11_25 - LEAQ 5(CX)(DI*1), BX - ANDL $31, BX - MOVQ SI, DI - TESTQ BX, BX - JNE LBB11_21 - -LBB11_25: - LEAQ 4(SI)(CX*1), BX - SUBQ SI, AX - ADDQ $-4, AX - -LBB11_26: - CMPQ AX, $32 - JB LBB11_31 - LEAQ -32(AX), CX - MOVQ CX, DX - ANDQ $-32, DX - LEAQ 32(DX)(BX*1), DX - ANDL $31, CX - QUAD $0xfffffdf4056ffec5 // vmovdqu $-524(%rip), %ymm0 /* LCPI11_0(%rip) */ - QUAD $0xfffffe0c0d6ffec5 // vmovdqu $-500(%rip), %ymm1 /* LCPI11_1(%rip) */ - QUAD $0xfffffe24156ffec5 // vmovdqu $-476(%rip), %ymm2 /* LCPI11_2(%rip) */ - QUAD $0xfffffe3c1d6ffec5 // vmovdqu $-452(%rip), %ymm3 /* LCPI11_3(%rip) */ - -LBB11_28: - LONG $0x236ffdc5 // vmovdqa (%rbx), %ymm4 - LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 - LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 - LONG $0xeeebd5c5 // vpor %ymm6, %ymm5, %ymm5 - LONG $0xf274ddc5 // vpcmpeqb %ymm2, %ymm4, %ymm6 - LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 - LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 - LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 - LONG $0xf4d7fdc5 // vpmovmskb %ymm4, %esi - CMPL SI, $-1 - JNE LBB11_44 - ADDQ $32, BX - ADDQ $-32, AX - CMPQ AX, $31 - JA LBB11_28 - MOVQ CX, AX - MOVQ DX, BX +LBB10_19: + MOVLQSX R10, R11 + CMPQ -24(BP), R11 + JBE LBB10_20 -LBB11_31: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ AX, $16 - JB LBB11_36 - LEAQ -16(AX), CX - MOVQ CX, DX - ANDQ $-16, DX - LEAQ 16(DX)(BX*1), DX - ANDL $15, CX - QUAD $0xfffffdf7056ffac5 // vmovdqu $-521(%rip), %xmm0 /* LCPI11_4(%rip) */ - QUAD $0xfffffdff0d6ffac5 // vmovdqu $-513(%rip), %xmm1 /* LCPI11_5(%rip) */ - QUAD $0xfffffe07156ffac5 // vmovdqu $-505(%rip), %xmm2 /* LCPI11_6(%rip) */ - QUAD $0xfffffe0f1d6ffac5 // vmovdqu $-497(%rip), %xmm3 /* LCPI11_7(%rip) */ +LBB10_9: + MOVQ -32(BP), DX + MOVB CX, 0(DX)(R11*1) + MOVL -16(BP), R9 + ADDL $1, R9 + MOVL R9, -16(BP) -LBB11_33: - LONG $0x236ff9c5 // vmovdqa (%rbx), %xmm4 - LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 - LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 - LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 - LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 - LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 - LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 - LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 - LONG $0xf4d7f9c5 // vpmovmskb %xmm4, %esi - CMPW SI, $-1 - JNE LBB11_96 - ADDQ $16, BX - ADDQ $-16, AX - CMPQ AX, $15 - JA LBB11_33 - MOVQ 
CX, AX - MOVQ DX, BX +LBB10_10: + MOVL R9, R10 + JMP LBB10_11 -LBB11_36: - TESTQ AX, AX - JE LBB11_42 - LEAQ 0(BX)(AX*1), CX - INCQ BX - MOVQ $4294977024, DX +LBB10_21: + ADDL $-1, -12(BP) + XORL R10, R10 + JMP LBB10_11 -LBB11_38: - MOVBLSX -1(BX), SI - CMPL SI, $32 - JA LBB11_101 - BTQ SI, DX - JAE LBB11_101 - DECQ AX - INCQ BX - TESTQ AX, AX - JNE LBB11_38 - MOVQ CX, BX +LBB10_22: + MOVQ SI, AX -LBB11_42: - SUBQ R15, BX - JMP LBB11_46 +LBB10_23: + TESTL R8, R8 + JE LBB10_25 + TESTB $1, R11 + JNE LBB10_26 + JMP LBB10_40 -LBB11_43: - MOVQ CX, -48(BP) - MOVQ CX, R12 - JMP LBB11_52 +LBB10_25: + MOVL R9, -12(BP) + TESTB $1, R11 + JE LBB10_40 -LBB11_44: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - SUBQ R15, BX - NOTL SI - MOVLQSX SI, AX - BSFQ AX, AX - ADDQ AX, BX - JMP LBB11_46 +LBB10_26: + MOVL AX, DX + MOVB 0(DI)(DX*1), CX + ORB $32, CX + CMPB CX, $101 + JNE LBB10_40 + MOVB 1(DI)(DX*1), CX + CMPB CX, $45 + JE LBB10_30 + MOVL $1, R8 + CMPB CX, $43 + JNE LBB10_32 + ADDL $2, AX + JMP LBB10_31 -LBB11_45: - ADDQ DI, CX - MOVQ R15, AX - NOTQ AX - LEAQ 5(AX)(CX*1), BX +LBB10_30: + ADDL $2, AX + MOVL $-1, R8 -LBB11_46: - CMPQ BX, R13 - JAE LBB11_52 - MOVB 0(R15)(BX*1), AX +LBB10_31: + MOVL AX, DX + MOVLQSX DX, AX + XORL DX, DX + CMPQ AX, SI + JL LBB10_33 + JMP LBB10_38 -LBB11_48: - LEAQ 1(BX), R12 - MOVQ R12, -48(BP) - MOVBLSX AX, CX - CMPL CX, $125 - JA LBB11_61 - LEAQ 0(R15)(BX*1), R14 - LONG $0xf8158d48; WORD $0x0002; BYTE $0x00 // leaq $760(%rip), %rdx /* LJTI11_0(%rip) */ - MOVLQSX 0(DX)(CX*4), CX - ADDQ DX, CX - JMP CX +LBB10_32: + ADDQ $1, DX + MOVLQSX DX, AX + XORL DX, DX + CMPQ AX, SI + JGE LBB10_38 -LBB11_50: - MOVQ BX, -48(BP) - TESTB $2, R8 - JNE LBB11_55 - LEAQ -80(BP), DI - LEAQ -48(BP), SI - MOVQ R10, DX - LONG $0x00083ae8; BYTE $0x00 // callq _vnumber - MOVQ -48(BP), BX - JMP LBB11_54 +LBB10_33: + XORL DX, DX -LBB11_52: - MOVQ $1, 0(R10) +LBB10_34: + MOVBLSX 0(DI)(AX*1), CX + CMPL CX, $48 + JL LBB10_38 + CMPB CX, $57 + JG LBB10_38 + CMPL DX, $9999 + JG LBB10_38 + LEAL 0(DX)(DX*4), DX + LEAL 0(CX)(DX*2), DX + ADDL $-48, DX + ADDQ $1, AX + CMPQ SI, AX + JNE LBB10_34 + +LBB10_38: + IMULL R8, DX + ADDL -12(BP), DX -LBB11_53: - MOVQ R12, BX +LBB10_39: + MOVL DX, -12(BP) -LBB11_54: - MOVQ BX, AX - ADDQ $40, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp +LBB10_40: + LEAQ -32(BP), DI + LEAQ -40(BP), SI + LONG $0xfff985e8; BYTE $0xff // callq _decimal_to_f64 + LONG $0x4510fbc5; BYTE $0xd8 // vmovsd $-40(%rbp), %xmm0 + ADDQ $48, SP + BYTE $0x5d // popq %rbp RET -LBB11_55: - XORL CX, CX - CMPB AX, $45 - SETEQ CX - ADDQ CX, R14 - SUBQ CX, R13 - JE LBB11_97 - MOVQ R10, -56(BP) - CMPQ BX, R13 - JAE LBB11_58 - MOVB 0(R14), AX +_value: + BYTE $0x55 // pushq %rbp + WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5741 // pushq %r15 + WORD $0x5641 // pushq %r14 + WORD $0x5541 // pushq %r13 + WORD $0x5441 // pushq %r12 + BYTE $0x53 // pushq %rbx + SUBQ $40, SP + MOVQ R8, BX + MOVQ CX, R14 + MOVQ SI, R12 + MOVQ DI, R15 + MOVQ DX, -48(BP) + MOVQ DI, -80(BP) + MOVQ SI, -72(BP) + LEAQ -80(BP), DI + LEAQ -48(BP), SI + LONG $0x0005cee8; BYTE $0x00 // callq _advance_ns + MOVBLSX AX, AX + CMPL AX, $125 + JA LBB11_11 + LONG $0x030d8d48; WORD $0x0003; BYTE $0x00 // leaq $771(%rip), %rcx /* LJTI11_0(%rip) */ + MOVLQSX 0(CX)(AX*4), AX + ADDQ CX, AX + JMP AX + +LBB11_2: + MOVQ R14, -56(BP) + MOVQ -48(BP), R14 + LEAQ -1(R14), R13 + MOVQ R13, -48(BP) + TESTB $2, BX + JNE LBB11_4 + LEAQ -80(BP), 
DI + LEAQ -48(BP), SI + MOVQ -56(BP), DX + LONG $0x000b80e8; BYTE $0x00 // callq _vnumber + MOVQ -48(BP), BX + JMP LBB11_49 + +LBB11_4: + XORL AX, AX + CMPB 0(R15)(R13*1), $45 + LEAQ 0(R15)(R13*1), BX + SETEQ AX + ADDQ AX, BX + SUBQ AX, R12 + JE LBB11_44 + CMPQ R13, R12 + JAE LBB11_7 + MOVB 0(BX), AX ADDB $-48, AX - CMPB AX, $10 - JAE LBB11_99 + CMPB AX, $9 + JA LBB11_46 -LBB11_58: - MOVQ R14, DI - MOVQ R13, SI - LONG $0x002367e8; BYTE $0x00 // callq _do_skip_number +LBB11_7: + MOVQ BX, DI + MOVQ R12, SI + LONG $0x0021d2e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB11_98 - ADDQ AX, R14 - SUBQ R15, R14 - MOVQ R14, -48(BP) - TESTQ R12, R12 - MOVQ -56(BP), R10 - JLE LBB11_100 - MOVQ $8, 0(R10) - MOVQ BX, 24(R10) - MOVQ R14, BX - JMP LBB11_54 + JS LBB11_45 + ADDQ AX, BX + SUBQ R15, BX + TESTQ R14, R14 + JLE LBB11_48 + MOVQ -56(BP), AX + MOVQ $8, 0(AX) + MOVQ R13, 24(AX) + JMP LBB11_49 + +LBB11_10: + MOVQ $1, 0(R14) + MOVQ -48(BP), BX + JMP LBB11_49 -LBB11_61: - MOVQ $-2, 0(R10) - JMP LBB11_54 +LBB11_11: + MOVQ $-2, 0(R14) + MOVQ -48(BP), BX + ADDQ $-1, BX + JMP LBB11_49 -LBB11_62: - MOVQ R10, R14 +LBB11_12: MOVQ $-1, -64(BP) + MOVQ -48(BP), R15 LEAQ -80(BP), DI LEAQ -64(BP), DX - MOVQ R12, SI - LONG $0x0004d2e8; BYTE $0x00 // callq _advance_string + MOVQ R15, SI + LONG $0x0007fce8; BYTE $0x00 // callq _advance_string MOVQ AX, BX TESTQ AX, AX - JS LBB11_81 + JS LBB11_33 MOVQ BX, -48(BP) - MOVQ R12, 16(R14) + MOVQ R15, 16(R14) MOVQ -64(BP), AX CMPQ AX, BX MOVQ $-1, CX @@ -4583,333 +4633,584 @@ LBB11_62: MOVQ CX, 24(R14) MOVL $7, AX MOVQ AX, 0(R14) - JMP LBB11_54 + JMP LBB11_49 -LBB11_64: - XORL AX, AX - TESTL R8, R8 - SETPL AX +LBB11_14: + TESTL BX, BX + MOVQ $-2, AX MOVL $11, CX - JMP LBB11_80 + JMP LBB11_32 -LBB11_65: - XORL AX, AX - TESTL R8, R8 - SETPL AX +LBB11_15: + TESTL BX, BX + MOVQ $-2, AX MOVL $10, CX - JMP LBB11_80 + JMP LBB11_32 -LBB11_66: - MOVQ $5, 0(R10) - JMP LBB11_53 +LBB11_16: + MOVQ $5, 0(R14) + MOVQ -48(BP), BX + JMP LBB11_49 -LBB11_67: - XORL AX, AX - TESTL R8, R8 - SETPL AX +LBB11_17: + TESTL BX, BX + MOVQ $-2, AX MOVL $12, CX - JMP LBB11_80 + JMP LBB11_32 -LBB11_68: - LEAQ -4(R13), AX - CMPQ BX, AX - JAE LBB11_77 - MOVL 0(R15)(R12*1), CX +LBB11_18: + MOVQ -48(BP), BX + LEAQ -4(R12), CX + MOVQ $-1, AX + CMPQ BX, CX + JA LBB11_25 + MOVL 0(R15)(BX*1), CX CMPL CX, $1702063201 - JNE LBB11_82 - ADDQ $5, BX - MOVQ BX, -48(BP) + JNE LBB11_34 + ADDQ $4, BX MOVL $4, AX - JMP LBB11_94 + JMP LBB11_24 -LBB11_71: - LEAQ -3(R13), AX - CMPQ BX, AX - JAE LBB11_77 - MOVL 0(R14), CX +LBB11_21: + MOVQ -48(BP), BX + LEAQ -3(R12), CX + MOVQ $-1, AX + CMPQ BX, CX + JA LBB11_25 + MOVL -1(R15)(BX*1), CX CMPL CX, $1819047278 - JNE LBB11_87 - ADDQ $4, BX - MOVQ BX, -48(BP) + JNE LBB11_37 + ADDQ $3, BX MOVL $2, AX - JMP LBB11_94 + JMP LBB11_24 -LBB11_74: - LEAQ -3(R13), AX - CMPQ BX, AX - JAE LBB11_77 - MOVL 0(R14), CX +LBB11_27: + MOVQ -48(BP), BX + LEAQ -3(R12), CX + MOVQ $-1, AX + CMPQ BX, CX + JA LBB11_25 + MOVL -1(R15)(BX*1), CX CMPL CX, $1702195828 - JNE LBB11_90 - ADDQ $4, BX - MOVQ BX, -48(BP) + JNE LBB11_41 + ADDQ $3, BX MOVL $3, AX - JMP LBB11_94 -LBB11_77: - MOVQ R13, -48(BP) - MOVQ $-1, AX - JMP LBB11_95 +LBB11_24: + MOVQ BX, R12 -LBB11_78: - MOVQ $6, 0(R10) - JMP LBB11_53 +LBB11_25: + MOVQ R12, -48(BP) + MOVQ R12, BX -LBB11_79: - XORL AX, AX - TESTL R8, R8 - SETPL AX +LBB11_26: + MOVQ AX, 0(R14) + JMP LBB11_49 + +LBB11_30: + MOVQ $6, 0(R14) + MOVQ -48(BP), BX + JMP LBB11_49 + +LBB11_31: + TESTL BX, BX + MOVQ $-2, AX MOVL $13, CX -LBB11_80: - MOVQ $-2, DX - LONG 
$0xd1480f48 // cmovsq %rcx, %rdx - MOVQ DX, 0(R10) - SUBQ AX, R12 - JMP LBB11_53 +LBB11_32: + LONG $0xc8490f48 // cmovnsq %rax, %rcx + MOVQ CX, 0(R14) + SARL $31, BX + NOTL BX + MOVLQSX BX, BX + ADDQ -48(BP), BX + JMP LBB11_49 -LBB11_81: - MOVQ R13, -48(BP) +LBB11_33: + MOVQ R12, -48(BP) MOVQ BX, 0(R14) - MOVQ R13, BX - JMP LBB11_54 + MOVQ R12, BX + JMP LBB11_49 -LBB11_82: +LBB11_34: MOVQ $-2, AX CMPB CX, $97 - JNE LBB11_86 + JNE LBB11_26 MOVL $1702063201, CX -LBB11_84: +LBB11_36: SHRL $8, CX - MOVBLSX 1(R15)(R12*1), DX - INCQ R12 + MOVBLSX 1(R15)(BX*1), DX + ADDQ $1, BX MOVBLZX CX, SI CMPL SI, DX - JE LBB11_84 - MOVQ R12, -48(BP) - -LBB11_86: - MOVQ R12, R13 - JMP LBB11_95 + JE LBB11_36 + JMP LBB11_40 -LBB11_87: - MOVQ BX, -48(BP) +LBB11_37: + ADDQ $-1, BX MOVQ $-2, AX CMPB CX, $110 - JNE LBB11_94 + JNE LBB11_26 MOVL $1819047278, CX -LBB11_89: +LBB11_39: + SHRL $8, CX + MOVBLSX 1(R15)(BX*1), DX + ADDQ $1, BX + MOVBLZX CX, SI + CMPL SI, DX + JE LBB11_39 + JMP LBB11_40 + +LBB11_41: + ADDQ $-1, BX + MOVQ $-2, AX + CMPB CX, $116 + JNE LBB11_26 + MOVL $1702195828, CX + +LBB11_43: SHRL $8, CX MOVBLSX 1(R15)(BX*1), DX - INCQ BX + ADDQ $1, BX MOVBLZX CX, SI CMPL SI, DX - JE LBB11_89 - JMP LBB11_93 + JE LBB11_43 + +LBB11_40: + MOVQ BX, -48(BP) + MOVQ AX, 0(R14) + JMP LBB11_49 + +LBB11_44: + MOVQ $-1, R13 + JMP LBB11_47 + +LBB11_45: + NOTQ AX + ADDQ AX, BX + +LBB11_46: + MOVQ $-2, R13 + +LBB11_47: + SUBQ R15, BX + MOVQ BX, -48(BP) + +LBB11_48: + MOVQ -56(BP), AX + MOVQ R13, 0(AX) + +LBB11_49: + MOVQ BX, AX + ADDQ $40, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET + +// .set L11_0_set_10, LBB11_10-LJTI11_0 +// .set L11_0_set_11, LBB11_11-LJTI11_0 +// .set L11_0_set_12, LBB11_12-LJTI11_0 +// .set L11_0_set_14, LBB11_14-LJTI11_0 +// .set L11_0_set_2, LBB11_2-LJTI11_0 +// .set L11_0_set_15, LBB11_15-LJTI11_0 +// .set L11_0_set_16, LBB11_16-LJTI11_0 +// .set L11_0_set_17, LBB11_17-LJTI11_0 +// .set L11_0_set_18, LBB11_18-LJTI11_0 +// .set L11_0_set_21, LBB11_21-LJTI11_0 +// .set L11_0_set_27, LBB11_27-LJTI11_0 +// .set L11_0_set_30, LBB11_30-LJTI11_0 +// .set L11_0_set_31, LBB11_31-LJTI11_0 +LJTI11_0: + LONG $0xfffffd97 // .long L11_0_set_10 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long 
L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffdbb // .long L11_0_set_12 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffe0e // .long L11_0_set_14 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffd06 // .long L11_0_set_2 + LONG $0xfffffe21 // .long L11_0_set_15 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffe34 // .long L11_0_set_16 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffe44 // .long L11_0_set_17 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffe57 // .long L11_0_set_18 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffe87 // .long L11_0_set_21 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffeb8 // .long L11_0_set_27 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // 
.long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xfffffef9 // .long L11_0_set_30 + LONG $0xfffffda7 // .long L11_0_set_11 + LONG $0xffffff09 // .long L11_0_set_31 + +LCPI12_0: + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + +LCPI12_1: + QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' + QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' + +LCPI12_2: + QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' + QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' + +LCPI12_3: + QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' + QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' + +LCPI12_4: + QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' + +LCPI12_5: + QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' + +LCPI12_6: + QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' + +LCPI12_7: + QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' + +_advance_ns: + BYTE $0x55 // pushq %rbp + WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + BYTE $0x53 // pushq %rbx + MOVQ 0(SI), R11 + MOVQ 0(DI), R9 + MOVQ 8(DI), R8 + CMPQ R11, R8 + JAE LBB12_4 + MOVB 0(R9)(R11*1), AX + CMPB AX, $13 + JE LBB12_4 + CMPB AX, $32 + JE LBB12_4 + ADDB $-9, AX + CMPB AX, $1 + JA LBB12_48 + +LBB12_4: + LEAQ 1(R11), AX + CMPQ AX, R8 + JAE LBB12_9 + MOVB 0(R9)(AX*1), CX + CMPB CX, $13 + JE LBB12_9 + CMPB CX, $32 + JE LBB12_9 + ADDB $-9, CX + CMPB CX, $1 + JA LBB12_8 + +LBB12_9: + LEAQ 2(R11), AX + CMPQ AX, R8 + JAE LBB12_14 + MOVB 0(R9)(AX*1), CX + CMPB CX, $13 + JE LBB12_14 + CMPB CX, $32 + JE LBB12_14 + ADDB $-9, CX + CMPB CX, $1 + JA LBB12_8 + +LBB12_14: + LEAQ 3(R11), AX + CMPQ AX, R8 + JAE LBB12_19 + MOVB 0(R9)(AX*1), CX + CMPB CX, $13 + JE LBB12_19 + CMPB CX, $32 + JE LBB12_19 + ADDB $-9, CX + CMPB CX, $1 + JBE LBB12_19 + +LBB12_8: + MOVQ AX, R11 + JMP LBB12_48 + +LBB12_19: + ADDQ $4, R11 + CMPQ R8, R11 + JBE LBB12_44 + LEAQ 0(R9)(R11*1), BX + MOVQ R8, CX + SUBQ R11, CX + JE LBB12_28 + MOVL BX, AX + ANDL $31, AX + TESTQ AX, AX + JE LBB12_28 + MOVL $5, DX + SUBQ R8, DX + MOVQ $4294977024, CX + +LBB12_23: + MOVBLSX 0(R9)(R11*1), AX + CMPL AX, $32 + JA LBB12_46 + BTQ AX, CX + JAE LBB12_46 + LEAQ 0(DX)(R11*1), BX + LEAQ 1(R11), AX + CMPQ BX, $4 + JE LBB12_27 + LEAL 0(R9)(R11*1), BX + ADDL $1, BX + ANDL $31, BX + MOVQ AX, R11 + TESTQ BX, BX + JNE LBB12_23 + +LBB12_27: + LEAQ 0(R9)(AX*1), BX + MOVQ R8, CX + SUBQ AX, CX + +LBB12_28: + CMPQ CX, $32 + JB LBB12_32 + MOVQ R9, AX + SUBQ BX, AX + QUAD $0xfffffe26056ffec5 // vmovdqu $-474(%rip), %ymm0 /* LCPI12_0(%rip) */ + QUAD $0xfffffe3e0d6ffec5 // vmovdqu $-450(%rip), %ymm1 /* LCPI12_1(%rip) */ + QUAD $0xfffffe56156ffec5 // vmovdqu $-426(%rip), %ymm2 /* LCPI12_2(%rip) */ + QUAD $0xfffffe6e1d6ffec5 // vmovdqu $-402(%rip), %ymm3 /* LCPI12_3(%rip) */ + +LBB12_30: + LONG $0x236ffdc5 // vmovdqa (%rbx), %ymm4 + LONG $0xe874ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm5 + LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 + 
LONG $0xeeebd5c5 // vpor %ymm6, %ymm5, %ymm5 + LONG $0xf274ddc5 // vpcmpeqb %ymm2, %ymm4, %ymm6 + LONG $0xe374ddc5 // vpcmpeqb %ymm3, %ymm4, %ymm4 + LONG $0xe6ebddc5 // vpor %ymm6, %ymm4, %ymm4 + LONG $0xe5ebddc5 // vpor %ymm5, %ymm4, %ymm4 + LONG $0xd4d7fdc5 // vpmovmskb %ymm4, %edx + CMPL DX, $-1 + JNE LBB12_45 + ADDQ $32, BX + ADDQ $-32, CX + ADDQ $-32, AX + CMPQ CX, $31 + JA LBB12_30 -LBB11_90: - MOVQ BX, -48(BP) - MOVQ $-2, AX - CMPB CX, $116 - JNE LBB11_94 - MOVL $1702195828, CX +LBB12_32: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + CMPQ CX, $16 + JB LBB12_36 + MOVQ R9, R10 + SUBQ BX, R10 + QUAD $0xfffffe38056ffac5 // vmovdqu $-456(%rip), %xmm0 /* LCPI12_4(%rip) */ + QUAD $0xfffffe400d6ffac5 // vmovdqu $-448(%rip), %xmm1 /* LCPI12_5(%rip) */ + QUAD $0xfffffe48156ffac5 // vmovdqu $-440(%rip), %xmm2 /* LCPI12_6(%rip) */ + QUAD $0xfffffe501d6ffac5 // vmovdqu $-432(%rip), %xmm3 /* LCPI12_7(%rip) */ -LBB11_92: - SHRL $8, CX - MOVBLSX 1(R15)(BX*1), DX - INCQ BX - MOVBLZX CX, SI - CMPL SI, DX - JE LBB11_92 +LBB12_34: + LONG $0x236ff9c5 // vmovdqa (%rbx), %xmm4 + LONG $0xe874d9c5 // vpcmpeqb %xmm0, %xmm4, %xmm5 + LONG $0xf174d9c5 // vpcmpeqb %xmm1, %xmm4, %xmm6 + LONG $0xeeebd1c5 // vpor %xmm6, %xmm5, %xmm5 + LONG $0xf274d9c5 // vpcmpeqb %xmm2, %xmm4, %xmm6 + LONG $0xe374d9c5 // vpcmpeqb %xmm3, %xmm4, %xmm4 + LONG $0xe6ebd9c5 // vpor %xmm6, %xmm4, %xmm4 + LONG $0xe5ebd9c5 // vpor %xmm5, %xmm4, %xmm4 + LONG $0xc4d7f9c5 // vpmovmskb %xmm4, %eax + CMPW AX, $-1 + JNE LBB12_49 + ADDQ $16, BX + ADDQ $-16, CX + ADDQ $-16, R10 + CMPQ CX, $15 + JA LBB12_34 -LBB11_93: - MOVQ BX, -48(BP) +LBB12_36: + TESTQ CX, CX + JE LBB12_42 + LEAQ 0(BX)(CX*1), R10 + XORL AX, AX + MOVQ $4294977024, R11 + +LBB12_38: + MOVBLSX 0(BX)(AX*1), DX + CMPL DX, $32 + JA LBB12_51 + BTQ DX, R11 + JAE LBB12_51 + ADDQ $1, AX + CMPQ CX, AX + JNE LBB12_38 + MOVQ R10, BX -LBB11_94: - MOVQ BX, R13 +LBB12_42: + SUBQ R9, BX + MOVQ BX, R11 + CMPQ R11, R8 + JB LBB12_47 + JMP LBB12_50 -LBB11_95: - MOVQ AX, 0(R10) - MOVQ R13, BX - JMP LBB11_54 +LBB12_44: + MOVQ R11, 0(SI) + JMP LBB12_50 -LBB11_96: - MOVWLZX SI, AX - SUBQ R15, BX - NOTL AX - BSFL AX, AX - ADDQ AX, BX - JMP LBB11_46 +LBB12_45: + WORD $0xf8c5; BYTE $0x77 // vzeroupper + NOTL DX + BSFL DX, R11 + SUBQ AX, R11 -LBB11_97: - SUBQ R15, R14 - MOVQ R14, -48(BP) - MOVQ $-1, BX - JMP LBB11_100 +LBB12_46: + CMPQ R11, R8 + JAE LBB12_50 -LBB11_98: - NOTQ AX - ADDQ AX, R14 +LBB12_47: + MOVQ 0(DI), R9 -LBB11_99: - SUBQ R15, R14 - MOVQ R14, -48(BP) - MOVQ $-2, BX - MOVQ -56(BP), R10 +LBB12_48: + LEAQ 1(R11), AX + MOVQ AX, 0(SI) + MOVB 0(R9)(R11*1), AX + MOVBLSX AX, AX + BYTE $0x5b // popq %rbx + BYTE $0x5d // popq %rbp + RET -LBB11_100: - MOVQ BX, 0(R10) - MOVQ R14, BX - JMP LBB11_54 +LBB12_49: + MOVWLZX AX, AX + NOTL AX + BSFL AX, R11 + SUBQ R10, R11 + CMPQ R11, R8 + JB LBB12_47 -LBB11_101: - MOVQ R15, AX - NOTQ AX +LBB12_50: + XORL AX, AX + MOVBLSX AX, AX + BYTE $0x5b // popq %rbx + BYTE $0x5d // popq %rbp + RET + +LBB12_51: + SUBQ R9, BX ADDQ AX, BX - JMP LBB11_46 - -// .set L11_0_set_52, LBB11_52-LJTI11_0 -// .set L11_0_set_61, LBB11_61-LJTI11_0 -// .set L11_0_set_62, LBB11_62-LJTI11_0 -// .set L11_0_set_64, LBB11_64-LJTI11_0 -// .set L11_0_set_50, LBB11_50-LJTI11_0 -// .set L11_0_set_65, LBB11_65-LJTI11_0 -// .set L11_0_set_66, LBB11_66-LJTI11_0 -// .set L11_0_set_67, LBB11_67-LJTI11_0 -// .set L11_0_set_68, LBB11_68-LJTI11_0 -// .set L11_0_set_71, LBB11_71-LJTI11_0 -// .set L11_0_set_74, LBB11_74-LJTI11_0 -// .set L11_0_set_78, LBB11_78-LJTI11_0 -// .set L11_0_set_79, LBB11_79-LJTI11_0 
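// LJTI11_0 (new copy added above, old copy removed below) is the jump
// table clang emits for _value's switch on the first non-whitespace
// byte; the CMPL $125 / JA guard ahead of the indexed jump routes every
// byte above '}' to the error arm. A hedged C sketch of the dispatch,
// with illustrative names only (the real handlers live in
// native/parsing.c):
//
//     int c = advance_ns(src, &pos);       /* skips ' ', '\t', '\n', '\r' */
//     if (c == '-' || ('0' <= c && c <= '9'))
//         return parse_number(src, &pos);  /* set_2  */
//     switch (c) {
//     case '"': return parse_string(src, &pos);      /* set_12 */
//     case 't': return match_literal(src, "true");   /* set_27 */
//     case 'f': return match_literal(src, "false");  /* set_18 */
//     case 'n': return match_literal(src, "null");   /* set_21 */
//     case '{': case '}': case '[': case ']':
//     case ',': case ':':
//               return structural(c);      /* set_30/31/16/17/14/15 */
//     case 0:   return V_EOF;              /* set_10 */
//     default:  return -ERR_INVAL;         /* set_11 */
//     }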
-LJTI11_0: - LONG $0xfffffd31 // .long L11_0_set_52 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdba // .long L11_0_set_62 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffe0c // .long L11_0_set_64 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffd11 // .long L11_0_set_50 - LONG $0xfffffe1e // .long L11_0_set_65 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG 
$0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffe30 // .long L11_0_set_66 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffe3c // .long L11_0_set_67 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffe4e // .long L11_0_set_68 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffe79 // .long L11_0_set_71 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffea3 // .long L11_0_set_74 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffedd // .long L11_0_set_78 - LONG $0xfffffdb1 // .long L11_0_set_61 - LONG $0xfffffee9 // .long L11_0_set_79 + MOVQ BX, R11 + CMPQ R11, R8 + JB LBB12_47 + JMP LBB12_50 _vstring: BYTE $0x55 // pushq %rbp @@ -4928,7 +5229,7 @@ _vstring: MOVQ R12, SI LONG $0x000080e8; BYTE $0x00 // callq _advance_string TESTQ AX, AX - JS LBB12_1 + JS LBB13_1 MOVQ AX, 0(BX) MOVQ R12, 16(R14) MOVQ -40(BP), CX @@ -4937,13 +5238,13 @@ _vstring: LONG $0xc14c0f48 // cmovlq %rcx, %rax MOVQ AX, 24(R14) MOVL $7, AX - JMP LBB12_3 + JMP LBB13_3 -LBB12_1: +LBB13_1: MOVQ 8(R15), CX MOVQ CX, 0(BX) -LBB12_3: +LBB13_3: MOVQ AX, 0(R14) ADDQ $16, SP BYTE $0x5b // popq %rbx @@ -4953,11 +5254,11 @@ LBB12_3: BYTE $0x5d // popq %rbp RET -LCPI13_0: +LCPI14_0: QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' -LCPI13_1: +LCPI14_1: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' @@ -4969,98 +5270,85 @@ _advance_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - MOVQ 8(DI), R12 - SUBQ SI, R12 - JE LBB13_18 - MOVQ 0(DI), R8 - ADDQ R8, SI + MOVQ 8(DI), R15 + SUBQ SI, R15 + JE LBB14_17 + MOVQ 0(DI), R9 MOVQ $-1, 0(DX) - CMPQ R12, $64 - JB LBB13_19 - MOVL R12, R9 - ANDL $63, R9 - MOVQ $-1, R14 - XORL R15, R15 - QUAD $0xffffff76056ffec5 // vmovdqu $-138(%rip), %ymm0 /* LCPI13_0(%rip) */ - QUAD $0xffffff8e0d6ffec5 // vmovdqu $-114(%rip), %ymm1 /* LCPI13_1(%rip) */ + CMPQ R15, $64 + JB LBB14_18 + MOVQ SI, DI + NOTQ DI + MOVQ $-1, R8 + XORL R14, R14 + QUAD $0xffffff7a056ffec5 // vmovdqu $-134(%rip), %ymm0 /* LCPI14_0(%rip) */ + QUAD $0xffffff920d6ffec5 // vmovdqu $-110(%rip), %ymm1 /* LCPI14_1(%rip) */ MOVQ $-6148914691236517206, R10 MOVQ $6148914691236517205, R11 -LBB13_3: - LONG $0x166ffec5 // vmovdqu (%rsi), %ymm2 - LONG $0x5e6ffec5; BYTE 
$0x20 // vmovdqu $32(%rsi), %ymm3 - LONG $0xe074edc5 // vpcmpeqb %ymm0, %ymm2, %ymm4 - LONG $0xc4d7fdc5 // vpmovmskb %ymm4, %eax - LONG $0xe074e5c5 // vpcmpeqb %ymm0, %ymm3, %ymm4 - LONG $0xdcd7fdc5 // vpmovmskb %ymm4, %ebx - LONG $0xd174edc5 // vpcmpeqb %ymm1, %ymm2, %ymm2 - LONG $0xfad7fdc5 // vpmovmskb %ymm2, %edi - LONG $0xd174e5c5 // vpcmpeqb %ymm1, %ymm3, %ymm2 - LONG $0xcad7fdc5 // vpmovmskb %ymm2, %ecx - SHLQ $32, BX - SHLQ $32, CX - ORQ CX, DI - JE LBB13_5 - CMPQ R14, $-1 - JE LBB13_8 - -LBB13_5: - ORQ AX, BX - MOVQ DI, AX - ORQ R15, AX - JNE LBB13_9 - -LBB13_6: - TESTQ BX, BX - JNE LBB13_10 +LBB14_3: + LONG $0x6f7ec1c4; WORD $0x3114 // vmovdqu (%r9,%rsi), %ymm2 + LONG $0x6f7ec1c4; WORD $0x315c; BYTE $0x20 // vmovdqu $32(%r9,%rsi), %ymm3 + LONG $0xe074edc5 // vpcmpeqb %ymm0, %ymm2, %ymm4 + LONG $0xe4d77dc5 // vpmovmskb %ymm4, %r12d + LONG $0xe074e5c5 // vpcmpeqb %ymm0, %ymm3, %ymm4 + LONG $0xccd7fdc5 // vpmovmskb %ymm4, %ecx + LONG $0xd174edc5 // vpcmpeqb %ymm1, %ymm2, %ymm2 + LONG $0xc2d7fdc5 // vpmovmskb %ymm2, %eax + LONG $0xd174e5c5 // vpcmpeqb %ymm1, %ymm3, %ymm2 + LONG $0xdad7fdc5 // vpmovmskb %ymm2, %ebx + SHLQ $32, CX + ORQ CX, R12 + SHLQ $32, BX + ORQ BX, AX + JNE LBB14_7 + TESTQ R14, R14 + JNE LBB14_9 + XORL R14, R14 + TESTQ R12, R12 + JNE LBB14_10 -LBB13_7: +LBB14_6: + ADDQ $-64, R15 + ADDQ $-64, DI ADDQ $64, SI - ADDQ $-64, R12 - CMPQ R12, $63 - JA LBB13_3 - JMP LBB13_13 + CMPQ R15, $63 + JA LBB14_3 + JMP LBB14_12 -LBB13_8: - MOVQ SI, CX - SUBQ R8, CX - BSFQ DI, R14 - ADDQ CX, R14 - MOVQ R14, 0(DX) - ORQ AX, BX - MOVQ DI, AX - ORQ R15, AX - JE LBB13_6 +LBB14_7: + CMPQ R8, $-1 + JNE LBB14_9 + BSFQ AX, R8 + ADDQ SI, R8 + MOVQ R8, 0(DX) -LBB13_9: - MOVQ R15, AX - NOTQ AX - ANDQ DI, AX - LEAQ 0(AX)(AX*1), R13 - ORQ R15, R13 - MOVQ R13, CX - NOTQ CX - ANDQ DI, CX - ANDQ R10, CX - XORL R15, R15 - ADDQ AX, CX - SETCS R15 - ADDQ CX, CX - XORQ R11, CX - ANDQ R13, CX +LBB14_9: + MOVQ R14, CX NOTQ CX - ANDQ CX, BX - TESTQ BX, BX - JE LBB13_7 - -LBB13_10: - BSFQ BX, AX + ANDQ AX, CX + LEAQ 0(CX)(CX*1), R13 + ORQ R14, R13 + MOVQ R13, BX + NOTQ BX + ANDQ AX, BX + ANDQ R10, BX + XORL R14, R14 + ADDQ CX, BX + SETCS R14 + ADDQ BX, BX + XORQ R11, BX + ANDQ R13, BX + NOTQ BX + ANDQ BX, R12 + TESTQ R12, R12 + JE LBB14_6 -LBB13_11: - SUBQ R8, SI - LEAQ 1(SI)(AX*1), AX +LBB14_10: + BSFQ R12, AX + SUBQ DI, AX -LBB13_12: +LBB14_11: BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -5070,154 +5358,172 @@ LBB13_12: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB13_13: - MOVQ R9, R12 - CMPQ R12, $32 - JB LBB13_24 +LBB14_12: + ADDQ R9, SI + CMPQ R15, $32 + JB LBB14_23 -LBB13_14: +LBB14_13: LONG $0x066ffec5 // vmovdqu (%rsi), %ymm0 - QUAD $0xfffffe760d74fdc5 // vpcmpeqb $-394(%rip), %ymm0, %ymm1 /* LCPI13_0(%rip) */ - LONG $0xc9d77dc5 // vpmovmskb %ymm1, %r9d - QUAD $0xfffffe8a0574fdc5 // vpcmpeqb $-374(%rip), %ymm0, %ymm0 /* LCPI13_1(%rip) */ - LONG $0xf8d7fdc5 // vpmovmskb %ymm0, %edi - TESTL DI, DI - JNE LBB13_20 - TESTQ R15, R15 - JNE LBB13_22 - XORL R15, R15 - TESTQ R9, R9 - JE LBB13_23 + QUAD $0xfffffe880d74fdc5 // vpcmpeqb $-376(%rip), %ymm0, %ymm1 /* LCPI14_0(%rip) */ + LONG $0xf9d7fdc5 // vpmovmskb %ymm1, %edi + QUAD $0xfffffe9c0574fdc5 // vpcmpeqb $-356(%rip), %ymm0, %ymm0 /* LCPI14_1(%rip) */ + LONG $0xc0d7fdc5 // vpmovmskb %ymm0, %eax + TESTL AX, AX + JNE LBB14_19 + TESTQ R14, R14 + JNE LBB14_21 + XORL R14, R14 + TESTQ DI, DI + JE LBB14_22 -LBB13_17: - BSFQ R9, AX - JMP LBB13_11 +LBB14_16: + BSFQ DI, AX + SUBQ R9, SI + ADDQ SI, AX + ADDQ $1, AX + JMP 
LBB14_11 -LBB13_19: - MOVQ $-1, R14 - XORL R15, R15 - CMPQ R12, $32 - JAE LBB13_14 - JMP LBB13_24 +LBB14_18: + ADDQ R9, SI + MOVQ $-1, R8 + XORL R14, R14 + CMPQ R15, $32 + JAE LBB14_13 + JMP LBB14_23 -LBB13_20: - CMPQ R14, $-1 - JNE LBB13_22 - MOVQ SI, AX - SUBQ R8, AX - BSFQ DI, R14 - ADDQ AX, R14 - MOVQ R14, 0(DX) - -LBB13_22: - MOVL R15, AX - NOTL AX - ANDL DI, AX - LEAL 0(AX)(AX*1), BX - ORL R15, BX - MOVL BX, CX - NOTL CX - ANDL DI, CX - ANDL $-1431655766, CX - XORL R15, R15 - ADDL AX, CX - SETCS R15 - ADDL CX, CX - XORL $1431655765, CX - ANDL BX, CX +LBB14_19: + CMPQ R8, $-1 + JNE LBB14_21 + MOVQ SI, CX + SUBQ R9, CX + BSFQ AX, R8 + ADDQ CX, R8 + MOVQ R8, 0(DX) + +LBB14_21: + MOVL R14, CX NOTL CX - ANDL CX, R9 - TESTQ R9, R9 - JNE LBB13_17 + ANDL AX, CX + LEAL 0(CX)(CX*1), BX + LEAL 0(R14)(CX*2), R10 + NOTL BX + ANDL AX, BX + ANDL $-1431655766, BX + XORL R14, R14 + ADDL CX, BX + SETCS R14 + ADDL BX, BX + XORL $1431655765, BX + ANDL R10, BX + NOTL BX + ANDL BX, DI + TESTQ DI, DI + JNE LBB14_16 -LBB13_23: +LBB14_22: ADDQ $32, SI - ADDQ $-32, R12 + ADDQ $-32, R15 -LBB13_24: +LBB14_23: + TESTQ R14, R14 + JNE LBB14_37 TESTQ R15, R15 - JNE LBB13_35 - MOVQ $-1, AX - TESTQ R12, R12 - JE LBB13_12 + JE LBB14_36 + +LBB14_25: + MOVQ R9, R10 + NOTQ R10 + ADDQ $1, R10 -LBB13_26: - MOVQ R8, R9 - NOTQ R9 +LBB14_26: + XORL AX, AX -LBB13_27: - LEAQ 1(SI), DI - MOVBLZX 0(SI), BX +LBB14_27: + MOVQ AX, DI + MOVBLZX 0(SI)(AX*1), BX CMPB BX, $34 - JE LBB13_34 - LEAQ -1(R12), R10 + JE LBB14_35 CMPB BX, $92 - JE LBB13_30 - MOVQ R10, R12 - MOVQ DI, SI - TESTQ R10, R10 - JNE LBB13_27 - JMP LBB13_12 + JE LBB14_30 + LEAQ 1(DI), AX + CMPQ R15, AX + JNE LBB14_27 + JMP LBB14_34 -LBB13_30: - TESTQ R10, R10 - JE LBB13_12 - CMPQ R14, $-1 - JNE LBB13_33 - ADDQ R9, DI - MOVQ DI, 0(DX) - MOVQ DI, R14 - -LBB13_33: - ADDQ $2, SI - ADDQ $-2, R12 - MOVQ R12, R10 - TESTQ R10, R10 - JNE LBB13_27 - JMP LBB13_12 +LBB14_30: + LEAQ -1(R15), CX + MOVQ $-1, AX + CMPQ CX, DI + JE LBB14_11 + CMPQ R8, $-1 + JNE LBB14_33 + LEAQ 0(R10)(SI*1), R8 + ADDQ DI, R8 + MOVQ R8, 0(DX) -LBB13_34: - SUBQ R8, DI - MOVQ DI, AX - JMP LBB13_12 +LBB14_33: + ADDQ DI, SI + ADDQ $2, SI + MOVQ R15, CX + SUBQ DI, CX + ADDQ $-2, CX + ADDQ $-2, R15 + CMPQ R15, DI + MOVQ CX, R15 + JNE LBB14_26 + JMP LBB14_11 -LBB13_35: - TESTQ R12, R12 - JE LBB13_18 - CMPQ R14, $-1 - JNE LBB13_38 - MOVQ R8, R14 - NOTQ R14 - ADDQ SI, R14 - MOVQ R14, 0(DX) - -LBB13_38: - INCQ SI - DECQ R12 - MOVQ $-1, AX - TESTQ R12, R12 - JNE LBB13_26 - JMP LBB13_12 +LBB14_34: + MOVQ $-1, AX + CMPB BX, $34 + JNE LBB14_11 + +LBB14_35: + ADDQ DI, SI + ADDQ $1, SI + +LBB14_36: + SUBQ R9, SI + MOVQ SI, AX + JMP LBB14_11 + +LBB14_37: + TESTQ R15, R15 + JE LBB14_17 + CMPQ R8, $-1 + JNE LBB14_40 + MOVQ R9, R8 + NOTQ R8 + ADDQ SI, R8 + MOVQ R8, 0(DX) + +LBB14_40: + ADDQ $1, SI + ADDQ $-1, R15 + TESTQ R15, R15 + JNE LBB14_25 + JMP LBB14_36 -LBB13_18: +LBB14_17: MOVQ $-1, AX - JMP LBB13_12 + JMP LBB14_11 -LCPI14_0: +LCPI15_0: LONG $0x43300000 // .long 1127219200 LONG $0x45300000 // .long 1160773632 LONG $0x00000000 // .long 0 LONG $0x00000000 // .long 0 -LCPI14_1: - QUAD $0x4330000000000000 // .quad 4841369599423283200 - QUAD $0x4530000000000000 // .quad 4985484787499139072 +LCPI15_1: + QUAD $0x4330000000000000 // .quad 0x4330000000000000 + QUAD $0x4530000000000000 // .quad 0x4530000000000000 -LCPI14_2: - QUAD $0x430c6bf526340000 // .quad 4831355200913801216 +LCPI15_2: + QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 -LCPI14_3: - QUAD $0xc30c6bf526340000 // .quad -4392016835940974592 +LCPI15_3: + 
QUAD $0xc30c6bf526340000 // .quad 0xc30c6bf526340000 _vnumber: BYTE $0x55 // pushq %rbp @@ -5230,7 +5536,6 @@ _vnumber: SUBQ $56, SP MOVQ DX, BX MOVQ SI, R14 - MOVQ $0, -48(BP) MOVQ 0(SI), AX MOVQ 0(DI), R15 MOVQ 8(DI), R13 @@ -5242,372 +5547,261 @@ _vnumber: MOVQ 0(SI), CX MOVQ CX, 24(DX) CMPQ AX, R13 - JAE LBB14_52 + JAE LBB15_52 MOVB 0(R15)(AX*1), DI - MOVL $1, R9 + MOVL $1, DX CMPB DI, $45 - JNE LBB14_4 - INCQ AX + JNE LBB15_4 + ADDQ $1, AX CMPQ AX, R13 - JAE LBB14_52 + JAE LBB15_52 MOVB 0(R15)(AX*1), DI - MOVL $-1, R9 + MOVL $-1, DX -LBB14_4: +LBB15_4: LEAL -48(DI), CX CMPB CX, $10 - JB LBB14_6 + JB LBB15_6 -LBB14_5: +LBB15_5: MOVQ AX, 0(R14) MOVQ $-2, 0(BX) - JMP LBB14_53 + JMP LBB15_53 -LBB14_6: +LBB15_6: CMPB DI, $48 - JNE LBB14_10 - LEAQ 1(AX), SI + JNE LBB15_10 + LEAQ 1(AX), R8 CMPQ AX, R13 - JAE LBB14_19 - MOVB 0(R15)(SI*1), DX - ADDB $-46, DX - CMPB DX, $55 - JA LBB14_19 - MOVBLZX DX, R8 - MOVQ $36028797027352577, DX - BTQ R8, DX - JAE LBB14_19 + JAE LBB15_22 + MOVB 0(R15)(R8*1), CX + ADDB $-46, CX + CMPB CX, $55 + JA LBB15_22 + MOVBLZX CX, CX + MOVQ $36028797027352577, SI + BTQ CX, SI + JAE LBB15_22 -LBB14_10: +LBB15_10: + MOVL DX, -44(BP) + MOVB $1, CX + MOVL CX, -56(BP) CMPQ AX, R13 - JAE LBB14_18 - CMPB CX, $9 - JA LBB14_20 - LEAQ -1(R13), R8 + JAE LBB15_21 + MOVL $4294967248, R9 + ADDQ $1, AX XORL CX, CX - XORL SI, SI + XORL R8, R8 XORL R12, R12 -LBB14_13: - CMPL SI, $18 - JG LBB14_15 - MOVBQSX DI, DI +LBB15_12: + CMPL R8, $18 + JG LBB15_14 LEAQ 0(R12)(R12*4), DX - LEAQ -48(DI)(DX*2), R12 - INCL SI - JMP LBB14_16 + MOVBLZX DI, DI + ADDL R9, DI + LEAQ 0(DI)(DX*2), R12 + ADDL $1, R8 + JMP LBB15_15 -LBB14_15: - INCL CX +LBB15_14: + ADDL $1, CX -LBB14_16: - CMPQ R8, AX - JE LBB14_24 - MOVBLZX 1(R15)(AX*1), DI - INCQ AX +LBB15_15: + CMPQ R13, AX + JE LBB15_23 + MOVBLZX 0(R15)(AX*1), DI LEAL -48(DI), DX + ADDQ $1, AX + CMPB DX, $10 + JB LBB15_12 + CMPB DI, $46 + JNE LBB15_24 + MOVQ $8, 0(BX) + CMPQ AX, R13 + JAE LBB15_52 + MOVB 0(R15)(AX*1), DX + ADDB $-48, DX CMPB DX, $10 - JB LBB14_13 - JMP LBB14_21 + JAE LBB15_5 + MOVL $0, -56(BP) + JMP LBB15_25 -LBB14_18: +LBB15_21: XORL CX, CX - XORL SI, SI + XORL R8, R8 XORL R12, R12 - JMP LBB14_25 - -LBB14_19: - MOVQ SI, 0(R14) - JMP LBB14_53 + JMP LBB15_25 -LBB14_20: - XORL R12, R12 - XORL SI, SI - XORL CX, CX +LBB15_22: + MOVQ R8, 0(R14) + JMP LBB15_53 -LBB14_21: - XORL DX, DX - TESTL CX, CX - SETGT DX - MOVL DX, -52(BP) - MOVL $9, R8 - CMPB DI, $46 - JNE LBB14_26 - INCQ AX - MOVQ $8, 0(BX) - CMPQ AX, R13 - JAE LBB14_52 - MOVB 0(R15)(AX*1), DX - ADDB $-48, DX - MOVL $8, R8 - CMPB DX, $10 - JAE LBB14_5 - JMP LBB14_26 - -LBB14_24: +LBB15_23: MOVQ R13, AX + JMP LBB15_25 -LBB14_25: +LBB15_24: + ADDQ $-1, AX + +LBB15_25: XORL DX, DX TESTL CX, CX SETGT DX - MOVL DX, -52(BP) - MOVL $9, R8 - -LBB14_26: - TESTL CX, CX - JNE LBB14_35 + MOVL DX, -68(BP) TESTQ R12, R12 - JNE LBB14_35 + JNE LBB15_34 + TESTL CX, CX + JNE LBB15_34 CMPQ AX, R13 - JAE LBB14_33 - MOVL AX, DI - SUBL R13, DI - XORL SI, SI + JAE LBB15_32 + MOVL AX, SI + SUBL R13, SI + XORL R8, R8 XORL CX, CX -LBB14_30: - CMPB 0(R15)(AX*1), $48 - JNE LBB14_34 - INCQ AX - DECL CX - CMPQ R13, AX - JNE LBB14_30 - XORL R12, R12 - CMPL R8, $9 - JE LBB14_55 - JMP LBB14_59 +LBB15_29: + CMPB 0(R15)(AX*1), $48 + JNE LBB15_33 + ADDQ $1, AX + ADDL $-1, CX + CMPQ R13, AX + JNE LBB15_29 + XORL R12, R12 + MOVL -56(BP), AX + TESTB AX, AX + JNE LBB15_55 + JMP LBB15_60 -LBB14_33: +LBB15_32: XORL CX, CX - XORL SI, SI + XORL R8, R8 -LBB14_34: +LBB15_33: XORL R12, R12 -LBB14_35: +LBB15_34: CMPQ AX, R13 - JAE 
LBB14_40 - CMPL SI, $18 - JG LBB14_40 + JAE LBB15_40 + CMPL R8, $18 + JG LBB15_40 + MOVL $4294967248, R9 -LBB14_37: +LBB15_37: MOVBLZX 0(R15)(AX*1), DI LEAL -48(DI), DX CMPB DX, $9 - JA LBB14_40 + JA LBB15_40 LEAQ 0(R12)(R12*4), DX - LEAQ -48(DI)(DX*2), R12 - DECL CX - INCQ AX + ADDL R9, DI + LEAQ 0(DI)(DX*2), R12 + ADDL $-1, CX + ADDQ $1, AX CMPQ AX, R13 - JAE LBB14_40 - LEAL 1(SI), DX - CMPL SI, $18 - MOVL DX, SI - JL LBB14_37 + JAE LBB15_40 + LEAL 1(R8), DX + CMPL R8, $18 + MOVL DX, R8 + JL LBB15_37 -LBB14_40: +LBB15_40: CMPQ AX, R13 - JAE LBB14_54 - MOVB 0(R15)(AX*1), SI - LEAL -48(SI), DX - CMPB DX, $9 - JA LBB14_46 - LEAQ -1(R13), DI - -LBB14_43: - CMPQ DI, AX - JE LBB14_76 - MOVBLZX 1(R15)(AX*1), SI - INCQ AX - LEAL -48(SI), DX - CMPB DX, $9 - JBE LBB14_43 - MOVL $1, -52(BP) - -LBB14_46: - ORB $32, SI - CMPB SI, $101 - JNE LBB14_54 + JAE LBB15_54 + MOVB 0(R15)(AX*1), DX + LEAL -48(DX), SI + CMPB SI, $9 + JA LBB15_46 + LEAQ -1(R13), SI + +LBB15_43: + CMPQ SI, AX + JE LBB15_59 + MOVBLZX 1(R15)(AX*1), DX + LEAL -48(DX), DI + ADDQ $1, AX + CMPB DI, $9 + JBE LBB15_43 + MOVL $1, -68(BP) + +LBB15_46: + ORB $32, DX + CMPB DX, $101 + JNE LBB15_54 LEAQ 1(AX), DI MOVQ $8, 0(BX) CMPQ DI, R13 - JAE LBB14_52 + JAE LBB15_52 MOVB 0(R15)(DI*1), SI CMPB SI, $45 - JE LBB14_50 + JE LBB15_50 MOVL $1, R8 CMPB SI, $43 - JNE LBB14_85 + JNE LBB15_87 -LBB14_50: +LBB15_50: ADDQ $2, AX CMPQ AX, R13 - JAE LBB14_52 + JAE LBB15_52 XORL DX, DX CMPB SI, $43 SETEQ DX - LEAL -1(DX)(DX*1), R8 - MOVB 0(R15)(AX*1), SI - JMP LBB14_86 - -LBB14_52: - MOVQ R13, 0(R14) - MOVQ $-1, 0(BX) - -LBB14_53: - ADDQ $56, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - RET - -LBB14_54: - MOVL CX, DI - MOVQ AX, R13 - CMPL R8, $9 - JNE LBB14_59 - -LBB14_55: - TESTL DI, DI - JNE LBB14_58 - MOVQ $-9223372036854775808, AX - MOVLQSX R9, CX - TESTQ R12, R12 - JNS LBB14_80 - MOVQ R12, DX - ANDQ CX, DX - CMPQ DX, AX - JE LBB14_80 - -LBB14_58: - MOVQ $8, 0(BX) - -LBB14_59: - MOVQ $0, -64(BP) - LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 - QUAD $0xfffffcf60562f9c5 // vpunpckldq $-778(%rip), %xmm0, %xmm0 /* LCPI14_0(%rip) */ - QUAD $0xfffffcfe055cf9c5 // vsubpd $-770(%rip), %xmm0, %xmm0 /* LCPI14_1(%rip) */ - LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 - LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 - LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) - MOVQ R12, AX - SHRQ $52, AX - JE LBB14_71 - -LBB14_60: - MOVQ R11, -80(BP) - MOVQ R10, -88(BP) - LEAQ -48(BP), CX - MOVQ DI, SI - MOVQ R12, DI - MOVQ SI, -72(BP) - MOVL R9, DX - MOVL R9, -56(BP) - LONG $0xffe855e8; BYTE $0xff // callq _atof_eisel_lemire64 - TESTB AX, AX - JE LBB14_64 - MOVQ -72(BP), SI - MOVL -56(BP), DX - CMPL -52(BP), $0 - JE LBB14_79 - INCQ R12 - LEAQ -64(BP), CX - MOVQ R12, DI - LONG $0xffe831e8; BYTE $0xff // callq _atof_eisel_lemire64 - TESTB AX, AX - JE LBB14_64 - LONG $0x4d10fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm1 - LONG $0x4510fbc5; BYTE $0xd0 // vmovsd $-48(%rbp), %xmm0 - LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JNE LBB14_64 - JNP LBB14_66 - -LBB14_64: - MOVQ 0(R14), AX - ADDQ AX, R15 - MOVQ R13, SI - SUBQ AX, SI - MOVQ R15, DI - MOVQ -88(BP), DX - MOVQ -80(BP), CX - LONG $0xffef07e8; BYTE $0xff // callq _atof_native - -LBB14_65: - LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) - -LBB14_66: - LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax - -LBB14_67: - MOVQ $-9223372036854775808, CX - DECQ CX - ANDQ 
AX, CX - MOVQ $9218868437227405312, DX - CMPQ CX, DX - JNE LBB14_69 - MOVQ $-8, 0(BX) - -LBB14_69: - MOVQ AX, 8(BX) + LEAL 0(DX)(DX*1), R8 + ADDL $-1, R8 + MOVB 0(R15)(AX*1), SI + JMP LBB15_88 -LBB14_70: +LBB15_52: MOVQ R13, 0(R14) - JMP LBB14_53 + MOVQ $-1, 0(BX) -LBB14_71: - LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx - MOVL R9, AX - SHRL $31, AX - SHLQ $63, AX - ORQ CX, AX - MOVQ AX, -48(BP) +LBB15_53: + ADDQ $56, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp + RET + +LBB15_54: + MOVL CX, SI + MOVQ AX, R13 + MOVL -56(BP), AX + TESTB AX, AX + JE LBB15_60 + +LBB15_55: + TESTL SI, SI + MOVL -44(BP), DX + JNE LBB15_58 + MOVQ $-9223372036854775808, AX + MOVLQSX DX, CX TESTQ R12, R12 - JE LBB14_67 - TESTL DI, DI - JE LBB14_67 - LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 - LEAL -1(DI), AX - CMPL AX, $36 - JA LBB14_77 - CMPL DI, $23 - JL LBB14_81 - MOVLQSX DI, AX - LONG $0xa30d8d48; WORD $0x00c3; BYTE $0x00 // leaq $50083(%rip), %rcx /* _P10_TAB(%rip) */ - QUAD $0xffff50c18459fbc5; BYTE $0xff // vmulsd $-176(%rcx,%rax,8), %xmm0, %xmm0 - LONG $0x4511fbc5; BYTE $0xd0 // vmovsd %xmm0, $-48(%rbp) - MOVL $22, AX - JMP LBB14_82 - -LBB14_76: - MOVL $1, -52(BP) - MOVL CX, DI - CMPL R8, $9 - JE LBB14_55 - JMP LBB14_59 - -LBB14_77: - CMPL DI, $-22 - JB LBB14_60 - NEGL DI - MOVLQSX DI, AX - LONG $0x610d8d48; WORD $0x00c3; BYTE $0x00 // leaq $50017(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x045efbc5; BYTE $0xc1 // vdivsd (%rcx,%rax,8), %xmm0, %xmm0 - JMP LBB14_65 - -LBB14_79: - MOVQ -48(BP), AX - JMP LBB14_67 + JNS LBB15_69 + MOVQ R12, DI + ANDQ CX, DI + CMPQ DI, AX + JE LBB15_69 + +LBB15_58: + MOVQ $8, 0(BX) + JMP LBB15_61 + +LBB15_59: + MOVL $1, -68(BP) + MOVL CX, SI + MOVL -56(BP), AX + TESTB AX, AX + JNE LBB15_55 + JMP LBB15_60 -LBB14_80: +LBB15_69: LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 IMULQ CX, R12 - QUAD $0xfffffb790562f9c5 // vpunpckldq $-1159(%rip), %xmm0, %xmm0 /* LCPI14_0(%rip) */ - QUAD $0xfffffb81055cf9c5 // vsubpd $-1151(%rip), %xmm0, %xmm0 /* LCPI14_1(%rip) */ + QUAD $0xfffffcd10562f9c5 // vpunpckldq $-815(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ + QUAD $0xfffffcd9055cf9c5 // vsubpd $-807(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ MOVQ R12, 16(BX) LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 @@ -5615,172 +5809,287 @@ LBB14_80: LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx ORQ AX, CX MOVQ CX, 8(BX) - JMP LBB14_70 + JMP LBB15_86 -LBB14_81: - MOVL DI, AX - -LBB14_82: - QUAD $0xfffffb65052ef9c5 // vucomisd $-1179(%rip), %xmm0 /* LCPI14_2(%rip) */ - JA LBB14_60 - QUAD $0xfffffb5f0d10fbc5 // vmovsd $-1185(%rip), %xmm1 /* LCPI14_3(%rip) */ - LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 - JA LBB14_60 - MOVL AX, AX - LONG $0xe80d8d48; WORD $0x00c2; BYTE $0x00 // leaq $49896(%rip), %rcx /* _P10_TAB(%rip) */ - LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 - JMP LBB14_65 - -LBB14_85: +LBB15_87: MOVQ DI, AX -LBB14_86: +LBB15_88: LEAL -48(SI), DI CMPB DI, $9 - JA LBB14_5 - MOVL R9, -56(BP) + JA LBB15_5 CMPQ AX, R13 - JAE LBB14_93 - CMPB DI, $9 - JA LBB14_93 + JAE LBB15_93 LEAQ -1(R13), R9 XORL DI, DI -LBB14_90: +LBB15_91: MOVL DI, DX + MOVBLZX SI, SI CMPL DI, $10000 LEAL 0(DX)(DX*4), DI - MOVBLZX SI, SI LEAL -48(SI)(DI*2), DI WORD $0x4d0f; BYTE $0xfa // cmovgel %edx, %edi CMPQ R9, AX - JE LBB14_94 + JE LBB15_94 MOVBLZX 1(R15)(AX*1), SI - INCQ AX LEAL -48(SI), DX + ADDQ $1, AX CMPB 
DX, $10 - JB LBB14_90 - JMP LBB14_95 + JB LBB15_91 + JMP LBB15_95 -LBB14_93: +LBB15_93: XORL DI, DI - JMP LBB14_95 + JMP LBB15_95 -LBB14_94: +LBB15_94: MOVQ R13, AX -LBB14_95: - IMULL R8, DI - ADDL CX, DI +LBB15_95: + MOVQ DI, SI + IMULL R8, SI + ADDL CX, SI MOVQ AX, R13 - MOVL -56(BP), R9 - JMP LBB14_59 + +LBB15_60: + MOVL -44(BP), DX + +LBB15_61: + MOVQ $0, -80(BP) + LONG $0x6ef9c1c4; BYTE $0xc4 // vmovq %r12, %xmm0 + QUAD $0xfffffc320562f9c5 // vpunpckldq $-974(%rip), %xmm0, %xmm0 /* LCPI15_0(%rip) */ + QUAD $0xfffffc3a055cf9c5 // vsubpd $-966(%rip), %xmm0, %xmm0 /* LCPI15_1(%rip) */ + LONG $0x0579e3c4; WORD $0x01c8 // vpermilpd $1, %xmm0, %xmm1 + LONG $0xc058f3c5 // vaddsd %xmm0, %xmm1, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + MOVQ R12, AX + SHRQ $52, AX + JNE LBB15_74 + LONG $0x7ef9e1c4; BYTE $0xc1 // vmovq %xmm0, %rcx + MOVL DX, AX + SHRL $31, AX + SHLQ $63, AX + ORQ CX, AX + MOVQ AX, -64(BP) + TESTL SI, SI + JE LBB15_82 + TESTQ R12, R12 + JE LBB15_82 + LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 + LEAL -1(SI), AX + CMPL AX, $36 + JA LBB15_67 + CMPL SI, $23 + JL LBB15_70 + LEAL -22(SI), AX + LONG $0x740d8d48; WORD $0x00bf; BYTE $0x00 // leaq $49012(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + MOVL $22, AX + JMP LBB15_71 + +LBB15_67: + CMPL SI, $-22 + JB LBB15_74 + NEGL SI + LONG $0x55058d48; WORD $0x00bf; BYTE $0x00 // leaq $48981(%rip), %rax /* _P10_TAB(%rip) */ + LONG $0x045efbc5; BYTE $0xf0 // vdivsd (%rax,%rsi,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + JMP LBB15_78 + +LBB15_70: + MOVL SI, AX + +LBB15_71: + QUAD $0xfffffbb7052ef9c5 // vucomisd $-1097(%rip), %xmm0 /* LCPI15_2(%rip) */ + JA LBB15_74 + QUAD $0xfffffbb50d10fbc5 // vmovsd $-1099(%rip), %xmm1 /* LCPI15_3(%rip) */ + LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 + JA LBB15_74 + MOVL AX, AX + LONG $0x260d8d48; WORD $0x00bf; BYTE $0x00 // leaq $48934(%rip), %rcx /* _P10_TAB(%rip) */ + LONG $0x0459fbc5; BYTE $0xc1 // vmulsd (%rcx,%rax,8), %xmm0, %xmm0 + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + JMP LBB15_78 + +LBB15_74: + MOVQ R11, -96(BP) + MOVQ R10, -56(BP) + LEAQ -64(BP), CX + MOVQ R12, DI + MOVQ SI, -88(BP) + LONG $0xffe795e8; BYTE $0xff // callq _atof_eisel_lemire64 + TESTB AX, AX + JE LBB15_80 + MOVQ -88(BP), SI + CMPL -68(BP), $0 + JE LBB15_81 + ADDQ $1, R12 + LEAQ -80(BP), CX + MOVQ R12, DI + MOVL -44(BP), DX + LONG $0xffe774e8; BYTE $0xff // callq _atof_eisel_lemire64 + TESTB AX, AX + JE LBB15_80 + LONG $0x4d10fbc5; BYTE $0xb0 // vmovsd $-80(%rbp), %xmm1 + LONG $0x4510fbc5; BYTE $0xc0 // vmovsd $-64(%rbp), %xmm0 + LONG $0xc82ef9c5 // vucomisd %xmm0, %xmm1 + JNE LBB15_80 + JP LBB15_80 + +LBB15_78: + LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + JMP LBB15_82 + +LBB15_80: + MOVQ 0(R14), AX + ADDQ AX, R15 + MOVQ R13, SI + SUBQ AX, SI + MOVQ R15, DI + MOVQ -56(BP), DX + MOVQ -96(BP), CX + LONG $0xffed89e8; BYTE $0xff // callq _atof_native + LONG $0x4511fbc5; BYTE $0xc0 // vmovsd %xmm0, $-64(%rbp) + LONG $0x7ef9e1c4; BYTE $0xc0 // vmovq %xmm0, %rax + JMP LBB15_83 + +LBB15_81: + MOVQ -64(BP), AX + +LBB15_82: + LONG $0x6ef9e1c4; BYTE $0xc0 // vmovq %rax, %xmm0 + +LBB15_83: + MOVQ $-9223372036854775808, CX + ADDQ $-1, CX + ANDQ AX, CX + MOVQ $9218868437227405312, AX + CMPQ CX, AX + JNE LBB15_85 + MOVQ $-8, 0(BX) + +LBB15_85: + LONG $0x4311fbc5; BYTE $0x08 // vmovsd %xmm0, $8(%rbx) + +LBB15_86: + MOVQ R13, 0(R14) + JMP 
LBB15_53 _vsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp BYTE $0x53 // pushq %rbx - MOVQ 0(SI), BX + MOVQ 0(SI), AX MOVQ 0(DI), R8 - MOVQ 8(DI), R10 + MOVQ 8(DI), R11 MOVQ $9, 0(DX) LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4211f8c5; BYTE $0x08 // vmovups %xmm0, $8(%rdx) MOVQ 0(SI), CX MOVQ CX, 24(DX) - CMPQ BX, R10 - JAE LBB15_1 - MOVB 0(R8)(BX*1), CX + CMPQ AX, R11 + JAE LBB16_1 + MOVB 0(R8)(AX*1), CX MOVL $1, R9 CMPB CX, $45 - JNE LBB15_5 - INCQ BX - CMPQ BX, R10 - JAE LBB15_1 - MOVB 0(R8)(BX*1), CX + JNE LBB16_5 + ADDQ $1, AX + CMPQ AX, R11 + JAE LBB16_1 + MOVB 0(R8)(AX*1), CX MOVQ $-1, R9 -LBB15_5: +LBB16_5: LEAL -48(CX), DI CMPB DI, $10 - JB LBB15_7 - MOVQ BX, 0(SI) + JB LBB16_7 + MOVQ AX, 0(SI) MOVQ $-2, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB15_1: - MOVQ R10, 0(SI) +LBB16_1: + MOVQ R11, 0(SI) MOVQ $-1, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB15_7: +LBB16_7: CMPB CX, $48 - JNE LBB15_8 - LEAQ 1(BX), DI - CMPQ BX, R10 - JAE LBB15_17 + JNE LBB16_12 + LEAQ 1(AX), DI + CMPQ AX, R11 + JAE LBB16_11 MOVB 0(R8)(DI*1), CX ADDB $-46, CX CMPB CX, $55 - JA LBB15_17 - MOVBLZX CX, R11 + JA LBB16_11 + MOVBLZX CX, R10 MOVQ $36028797027352577, CX - BTQ R11, CX - JAE LBB15_17 + BTQ R10, CX + JAE LBB16_11 -LBB15_8: +LBB16_12: + CMPQ AX, R11 + MOVQ R11, R10 + LONG $0xd0470f4c // cmovaq %rax, %r10 XORL DI, DI -LBB15_9: - CMPQ BX, R10 - JAE LBB15_22 - MOVBQSX 0(R8)(BX*1), CX - LEAL -48(CX), AX - CMPB AX, $9 - JA LBB15_18 +LBB16_13: + CMPQ R10, AX + JE LBB16_23 + MOVBQSX 0(R8)(AX*1), CX + LEAL -48(CX), BX + CMPB BX, $9 + JA LBB16_18 IMUL3Q $10, DI, DI - JO LBB15_13 - INCQ BX - ADDQ $-48, CX + JO LBB16_17 + ADDQ $1, AX + ADDL $-48, CX IMULQ R9, CX ADDQ CX, DI - JNO LBB15_9 + JNO LBB16_13 -LBB15_13: - DECQ BX - MOVQ BX, 0(SI) +LBB16_17: + ADDQ $-1, AX + MOVQ AX, 0(SI) MOVQ $-5, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB15_17: +LBB16_11: MOVQ DI, 0(SI) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB15_18: - CMPB CX, $101 - JE LBB15_21 - CMPB CX, $69 - JE LBB15_21 +LBB16_18: + CMPQ AX, R11 + JAE LBB16_22 CMPB CX, $46 - JNE LBB15_22 + JE LBB16_25 + CMPB CX, $69 + JE LBB16_25 + CMPB CX, $101 + JNE LBB16_22 -LBB15_21: - MOVQ BX, 0(SI) +LBB16_25: + MOVQ AX, 0(SI) MOVQ $-6, 0(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB15_22: - MOVQ BX, 0(SI) +LBB16_22: + MOVQ AX, R10 + +LBB16_23: + MOVQ R10, 0(SI) MOVQ DI, 16(DX) BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp @@ -5789,106 +6098,125 @@ LBB15_22: _vunsigned: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5641 // pushq %r14 + BYTE $0x53 // pushq %rbx MOVQ DX, R8 MOVQ 0(SI), CX MOVQ 0(DI), R9 - MOVQ 8(DI), R11 + MOVQ 8(DI), R14 MOVQ $9, 0(DX) LONG $0xc057f8c5 // vxorps %xmm0, %xmm0, %xmm0 LONG $0x4211f8c5; BYTE $0x08 // vmovups %xmm0, $8(%rdx) MOVQ 0(SI), AX MOVQ AX, 24(DX) - CMPQ CX, R11 - JAE LBB16_1 + CMPQ CX, R14 + JAE LBB17_1 MOVB 0(R9)(CX*1), AX CMPB AX, $45 - JNE LBB16_4 + JNE LBB17_4 -LBB16_3: +LBB17_3: MOVQ CX, 0(SI) MOVQ $-6, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB16_1: - MOVQ R11, 0(SI) +LBB17_1: + MOVQ R14, 0(SI) MOVQ $-1, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB16_4: +LBB17_4: LEAL -48(AX), DX CMPB DX, $10 - JB LBB16_6 + JB LBB17_6 MOVQ CX, 0(SI) MOVQ $-2, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB16_6: +LBB17_6: CMPB AX, $48 - JNE LBB16_7 
+ JNE LBB17_10 MOVB 1(R9)(CX*1), AX ADDB $-46, AX CMPB AX, $55 - JA LBB16_16 + JA LBB17_9 MOVBLZX AX, AX MOVQ $36028797027352577, DX BTQ AX, DX - JAE LBB16_16 + JAE LBB17_9 -LBB16_7: +LBB17_10: + CMPQ R14, CX + MOVQ CX, R10 + LONG $0xd6470f4d // cmovaq %r14, %r10 XORL AX, AX - MOVL $10, R10 + MOVL $10, R11 -LBB16_8: - CMPQ CX, R11 - JAE LBB16_20 - MOVBLSX 0(R9)(CX*1), DI - LEAL -48(DI), DX +LBB17_11: + CMPQ R10, CX + JE LBB17_22 + MOVBLSX 0(R9)(CX*1), BX + LEAL -48(BX), DX CMPB DX, $9 - JA LBB16_17 - MULQ R10 - JO LBB16_13 - INCQ CX - ADDL $-48, DI - MOVLQSX DI, DX - MOVQ DX, DI - SARQ $63, DI - ADDQ DX, AX - ADCQ $0, DI - MOVL DI, DX - ANDL $1, DX + JA LBB17_17 + MULQ R11 + JO LBB17_16 + ADDQ $1, CX + ADDL $-48, BX + XORL DI, DI + ADDQ BX, AX + SETCS DI + MOVQ DI, DX NEGQ DX XORQ DX, DI - JNE LBB16_13 + JNE LBB17_16 TESTQ DX, DX - JNS LBB16_8 + JNS LBB17_11 -LBB16_13: - DECQ CX +LBB17_16: + ADDQ $-1, CX MOVQ CX, 0(SI) MOVQ $-5, 0(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB16_17: - CMPB DI, $101 - JE LBB16_3 - CMPB DI, $69 - JE LBB16_3 - CMPB DI, $46 - JE LBB16_3 - -LBB16_20: - MOVQ CX, 0(SI) +LBB17_17: + CMPQ CX, R14 + JAE LBB17_21 + CMPB BX, $46 + JE LBB17_3 + CMPB BX, $69 + JE LBB17_3 + CMPB BX, $101 + JE LBB17_3 + +LBB17_21: + MOVQ CX, R10 + +LBB17_22: + MOVQ R10, 0(SI) MOVQ AX, 16(R8) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LBB16_16: - INCQ CX +LBB17_9: + ADDQ $1, CX MOVQ CX, 0(SI) + BYTE $0x5b // popq %rbx + WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET @@ -5904,34 +6232,6 @@ _skip_one: BYTE $0x5d // popq %rbp JMP _fsm_exec -LCPI18_0: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - -LCPI18_1: - QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' - QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' - -LCPI18_2: - QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' - QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' - -LCPI18_3: - QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' - QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' - -LCPI18_4: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - -LCPI18_5: - QUAD $0x0909090909090909; QUAD $0x0909090909090909 // .space 16, '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t' - -LCPI18_6: - QUAD $0x0a0a0a0a0a0a0a0a; QUAD $0x0a0a0a0a0a0a0a0a // .space 16, '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n' - -LCPI18_7: - QUAD $0x0d0d0d0d0d0d0d0d; QUAD $0x0d0d0d0d0d0d0d0d // .space 16, '\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r' - _fsm_exec: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp @@ -5940,862 +6240,549 @@ _fsm_exec: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $24, SP + SUBQ $40, SP + MOVL CX, -60(BP) CMPL 0(DI), $0 - JE LBB18_2 - MOVQ DX, R11 - MOVQ SI, BX + JE LBB19_2 MOVQ DI, R12 - MOVL CX, -52(BP) - MOVQ $-1, R14 - MOVQ $4294977024, R8 - QUAD $0xffffff051d6ffec5 // vmovdqu $-251(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xffffff1d256ffec5 // vmovdqu $-227(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xffffff352d6ffec5 // vmovdqu $-203(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xffffff4d356ffec5 // vmovdqu 
$-179(%rip), %ymm6 /* LCPI18_3(%rip) */ + MOVQ DX, -56(BP) MOVQ SI, -48(BP) - JMP LBB18_4 + MOVQ $-1, R14 + JMP LBB19_6 -LBB18_2: +LBB19_2: MOVQ $-1, R13 - JMP LBB18_122 + JMP LBB19_74 -LBB18_3: +LBB19_3: LEAQ 3(AX), CX - MOVQ CX, 0(R11) + MOVQ CX, 0(BX) TESTQ AX, AX - JLE LBB18_118 + JLE LBB19_74 -LBB18_94: - MOVL 0(R12), AX +LBB19_4: + MOVL 0(R12), CX MOVQ R14, R13 - TESTL AX, AX - JE LBB18_122 - -LBB18_4: - MOVQ 0(R11), SI - MOVQ 0(BX), R9 - MOVQ 8(BX), R10 - CMPQ SI, R10 - JAE LBB18_8 - MOVB 0(R9)(SI*1), CX - CMPB CX, $13 - JE LBB18_8 - CMPB CX, $32 - JE LBB18_8 - ADDB $-9, CX - CMPB CX, $1 - JA LBB18_53 - -LBB18_8: - LEAQ 1(SI), DX - CMPQ DX, R10 - JAE LBB18_13 - MOVB 0(R9)(DX*1), CX - CMPB CX, $13 - JE LBB18_13 - CMPB CX, $32 - JE LBB18_13 - ADDB $-9, CX - CMPB CX, $1 - JA LBB18_12 - -LBB18_13: - LEAQ 2(SI), DX - CMPQ DX, R10 - JAE LBB18_18 - MOVB 0(R9)(DX*1), CX - CMPB CX, $13 - JE LBB18_18 - CMPB CX, $32 - JE LBB18_18 - ADDB $-9, CX - CMPB CX, $1 - JA LBB18_12 - -LBB18_18: - LEAQ 3(SI), DX - CMPQ DX, R10 - JAE LBB18_23 - MOVB 0(R9)(DX*1), CX - CMPB CX, $13 - JE LBB18_23 - CMPB CX, $32 - JE LBB18_23 - ADDB $-9, CX - CMPB CX, $1 - JBE LBB18_23 - -LBB18_12: - MOVQ DX, SI - JMP LBB18_53 - -LBB18_23: - LEAQ 4(SI), CX - CMPQ R10, CX - JBE LBB18_48 - LEAQ 0(R9)(CX*1), DI - MOVQ R10, DX - SUBQ CX, DX - JE LBB18_32 - MOVL DI, CX - ANDL $31, CX - TESTQ CX, CX - JE LBB18_32 - LEAQ 0(R9)(SI*1), DI - MOVQ R10, DX - SUBQ SI, DX - LEAQ -5(DX), SI - XORL BX, BX - -LBB18_27: - MOVBLSX 4(DI)(BX*1), CX - CMPL CX, $32 - JA LBB18_51 - BTQ CX, R8 - JAE LBB18_51 - LEAQ 1(BX), CX - CMPQ SI, BX - JE LBB18_31 - LEAQ 5(DI)(BX*1), AX - ANDL $31, AX - MOVQ CX, BX - TESTQ AX, AX - JNE LBB18_27 - -LBB18_31: - LEAQ 4(CX)(DI*1), DI - SUBQ CX, DX - ADDQ $-4, DX - MOVQ -48(BP), BX - -LBB18_32: - CMPQ DX, $32 - JB LBB18_37 - LEAQ -32(DX), SI - MOVQ SI, CX - ANDQ $-32, CX - LEAQ 32(CX)(DI*1), R8 - ANDL $31, SI - -LBB18_34: - LONG $0x076ffdc5 // vmovdqa (%rdi), %ymm0 - LONG $0xcb74fdc5 // vpcmpeqb %ymm3, %ymm0, %ymm1 - LONG $0xd474fdc5 // vpcmpeqb %ymm4, %ymm0, %ymm2 - LONG $0xcaebf5c5 // vpor %ymm2, %ymm1, %ymm1 - LONG $0xd574fdc5 // vpcmpeqb %ymm5, %ymm0, %ymm2 - LONG $0xc674fdc5 // vpcmpeqb %ymm6, %ymm0, %ymm0 - LONG $0xc2ebfdc5 // vpor %ymm2, %ymm0, %ymm0 - LONG $0xc1ebfdc5 // vpor %ymm1, %ymm0, %ymm0 - LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx - CMPL CX, $-1 - JNE LBB18_49 - ADDQ $32, DI - ADDQ $-32, DX - CMPQ DX, $31 - JA LBB18_34 - MOVQ SI, DX - MOVQ R8, DI - -LBB18_37: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ DX, $16 - JB LBB18_111 - LEAQ -16(DX), SI - MOVQ SI, AX - ANDQ $-16, AX - LEAQ 16(AX)(DI*1), R8 - ANDL $15, SI - QUAD $0xfffffd381d6ffec5 // vmovdqu $-712(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xfffffd50256ffec5 // vmovdqu $-688(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffffd682d6ffec5 // vmovdqu $-664(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffffd80356ffec5 // vmovdqu $-640(%rip), %ymm6 /* LCPI18_3(%rip) */ - -LBB18_39: - LONG $0x076ff9c5 // vmovdqa (%rdi), %xmm0 - QUAD $0xfffffd940d74f9c5 // vpcmpeqb $-620(%rip), %xmm0, %xmm1 /* LCPI18_4(%rip) */ - QUAD $0xfffffd9c1574f9c5 // vpcmpeqb $-612(%rip), %xmm0, %xmm2 /* LCPI18_5(%rip) */ - LONG $0xcaebf1c5 // vpor %xmm2, %xmm1, %xmm1 - QUAD $0xfffffda01574f9c5 // vpcmpeqb $-608(%rip), %xmm0, %xmm2 /* LCPI18_6(%rip) */ - QUAD $0xfffffda80574f9c5 // vpcmpeqb $-600(%rip), %xmm0, %xmm0 /* LCPI18_7(%rip) */ - LONG $0xc2ebf9c5 // vpor %xmm2, %xmm0, %xmm0 - LONG $0xc1ebf9c5 // vpor %xmm1, %xmm0, %xmm0 - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - 
CMPW CX, $-1 - JNE LBB18_54 - ADDQ $16, DI - ADDQ $-16, DX - CMPQ DX, $15 - JA LBB18_39 - MOVQ SI, DX - MOVQ R8, DI - TESTQ DX, DX - JE LBB18_47 - -LBB18_42: - LEAQ 0(DI)(DX*1), R8 - INCQ DI - MOVQ DI, SI - -LBB18_43: - MOVBLSX -1(SI), CX - CMPL CX, $32 - JA LBB18_55 - MOVQ $4294977024, AX - BTQ CX, AX - JAE LBB18_55 - DECQ DX - INCQ SI - TESTQ DX, DX - JNE LBB18_43 - MOVQ R8, DI - -LBB18_47: - MOVQ $4294977024, R8 - SUBQ R9, DI - MOVQ DI, SI - CMPQ SI, R10 - JB LBB18_52 - JMP LBB18_56 - -LBB18_48: - MOVQ CX, 0(R11) - JMP LBB18_56 - -LBB18_49: - WORD $0xf8c5; BYTE $0x77 // vzeroupper - QUAD $0xfffffcc9356ffec5 // vmovdqu $-823(%rip), %ymm6 /* LCPI18_3(%rip) */ - QUAD $0xfffffca12d6ffec5 // vmovdqu $-863(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffffc79256ffec5 // vmovdqu $-903(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffffc511d6ffec5 // vmovdqu $-943(%rip), %ymm3 /* LCPI18_0(%rip) */ - SUBQ R9, DI - NOTL CX - MOVLQSX CX, AX - BSFQ AX, SI - -LBB18_50: - ADDQ DI, SI - MOVQ $4294977024, R8 - CMPQ SI, R10 - JAE LBB18_56 - -LBB18_52: - MOVQ 0(BX), R9 - -LBB18_53: - LEAQ 1(SI), AX - MOVQ AX, 0(R11) - MOVB 0(R9)(SI*1), DX - JMP LBB18_57 - -LBB18_51: - ADDQ BX, DI - NOTQ R9 - LEAQ 5(R9)(DI*1), SI - MOVQ -48(BP), BX - CMPQ SI, R10 - JB LBB18_52 - -LBB18_56: - XORL DX, DX - -LBB18_57: - MOVLQSX 0(R12), CX - LEAQ -1(CX), AX - MOVL 0(R12)(CX*4), SI + TESTL CX, CX + JE LBB19_74 + +LBB19_6: + MOVQ -48(BP), R13 + MOVQ R13, DI + MOVQ -56(BP), BX + MOVQ BX, SI + LONG $0xfff226e8; BYTE $0xff // callq _advance_ns + MOVLQSX 0(R12), DX + LEAQ -1(DX), CX CMPQ R14, $-1 - JNE LBB18_59 - MOVQ 0(R11), R14 - DECQ R14 + JNE LBB19_8 + MOVQ 0(BX), R14 + ADDQ $-1, R14 -LBB18_59: - DECL SI +LBB19_8: + MOVL 0(R12)(DX*4), SI + ADDL $-1, SI CMPL SI, $5 - JA LBB18_64 - LONG $0xdc3d8d48; WORD $0x0005; BYTE $0x00 // leaq $1500(%rip), %rdi /* LJTI18_0(%rip) */ + JA LBB19_13 + LONG $0x353d8d48; WORD $0x0004; BYTE $0x00 // leaq $1077(%rip), %rdi /* LJTI19_0(%rip) */ MOVLQSX 0(DI)(SI*4), SI ADDQ DI, SI JMP SI -LBB18_61: - MOVBLSX DX, DX - CMPL DX, $44 - JE LBB18_83 - CMPL DX, $93 - JNE LBB18_121 - MOVL AX, 0(R12) - MOVQ R14, R13 - TESTL AX, AX - JNE LBB18_4 - JMP LBB18_122 +LBB19_10: + MOVBLSX AX, AX + CMPL AX, $44 + JE LBB19_30 + CMPL AX, $93 + JE LBB19_12 + JMP LBB19_68 -LBB18_64: - MOVL AX, 0(R12) - MOVBLSX DX, AX +LBB19_13: + MOVL CX, 0(R12) + MOVBLSX AX, AX CMPL AX, $123 - JBE LBB18_80 - JMP LBB18_121 - -LBB18_65: - MOVBLSX DX, DX - CMPL DX, $44 - JE LBB18_85 - CMPL DX, $125 - JNE LBB18_121 + JBE LBB19_25 + JMP LBB19_68 + +LBB19_14: + MOVBLSX AX, AX + CMPL AX, $44 + JNE LBB19_15 + CMPL DX, $65535 + JG LBB19_73 + LEAL 1(DX), AX MOVL AX, 0(R12) - MOVQ R14, R13 - TESTL AX, AX - JNE LBB18_4 - JMP LBB18_122 - -LBB18_68: - CMPB DX, $34 - JNE LBB18_121 - MOVL $4, 0(R12)(CX*4) - -LBB18_70: - MOVQ 0(R11), R15 - MOVQ BX, DI + MOVL $3, 4(R12)(DX*4) + JMP LBB19_4 + +LBB19_16: + CMPB AX, $34 + JNE LBB19_68 + MOVL $4, 0(R12)(DX*4) + MOVQ 0(BX), R15 + MOVQ R13, DI + +LBB19_18: MOVQ R15, SI - LEAQ -64(BP), DX - MOVQ R11, BX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0xfff13fe8; BYTE $0xff // callq _advance_string + LEAQ -72(BP), DX + LONG $0xfff492e8; BYTE $0xff // callq _advance_string MOVQ AX, R13 TESTQ AX, AX - JS LBB18_112 + JS LBB19_65 MOVQ R13, 0(BX) TESTQ R15, R15 - MOVQ BX, R11 - MOVQ -48(BP), BX - MOVQ $4294977024, R8 - QUAD $0xfffffb231d6ffec5 // vmovdqu $-1245(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xfffffb3b256ffec5 // vmovdqu $-1221(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffffb532d6ffec5 // vmovdqu 
$-1197(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffffb6b356ffec5 // vmovdqu $-1173(%rip), %ymm6 /* LCPI18_3(%rip) */ - JG LBB18_94 - JMP LBB18_113 - -LBB18_72: - CMPB DX, $58 - JNE LBB18_121 - MOVL $0, 0(R12)(CX*4) - JMP LBB18_94 - -LBB18_74: - CMPB DX, $93 - JNE LBB18_79 - MOVL AX, 0(R12) - MOVQ R14, R13 - TESTL AX, AX - JNE LBB18_4 - JMP LBB18_122 - -LBB18_76: - MOVBLSX DX, DX - CMPL DX, $34 - JE LBB18_87 - CMPL DX, $125 - JNE LBB18_121 - MOVL AX, 0(R12) - MOVQ R14, R13 - TESTL AX, AX - JNE LBB18_4 - JMP LBB18_122 - -LBB18_79: - MOVL $1, 0(R12)(CX*4) - MOVBLSX DX, AX + JG LBB19_4 + JMP LBB19_20 + +LBB19_21: + CMPB AX, $58 + JNE LBB19_68 + MOVL $0, 0(R12)(DX*4) + JMP LBB19_4 + +LBB19_23: + CMPB AX, $93 + JE LBB19_12 + MOVL $1, 0(R12)(DX*4) + MOVBLSX AX, AX CMPL AX, $123 - JA LBB18_121 + JA LBB19_68 -LBB18_80: +LBB19_25: MOVQ $-1, R13 - LONG $0x930d8d48; WORD $0x0004; BYTE $0x00 // leaq $1171(%rip), %rcx /* LJTI18_1(%rip) */ + LONG $0x6a0d8d48; WORD $0x0003; BYTE $0x00 // leaq $874(%rip), %rcx /* LJTI19_1(%rip) */ MOVLQSX 0(CX)(AX*4), AX ADDQ CX, AX JMP AX -LBB18_81: - MOVQ 0(R11), R15 +LBB19_28: + MOVQ -56(BP), BX + MOVQ 0(BX), R15 LEAQ -1(R15), R13 - MOVQ 0(BX), DI + MOVQ -48(BP), AX + MOVQ 0(AX), DI ADDQ R13, DI - MOVQ 8(BX), SI + MOVQ 8(AX), SI SUBQ R13, SI - MOVQ R11, BX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x000e90e8; BYTE $0x00 // callq _do_skip_number + LONG $0x000d76e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB18_114 + JS LBB19_66 MOVQ 0(BX), CX - LEAQ -1(AX)(CX*1), AX + ADDQ CX, AX + ADDQ $-1, AX MOVQ AX, 0(BX) TESTQ R15, R15 - MOVQ BX, R11 - MOVQ -48(BP), BX - MOVQ $4294977024, R8 - QUAD $0xfffffa2e1d6ffec5 // vmovdqu $-1490(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xfffffa46256ffec5 // vmovdqu $-1466(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffffa5e2d6ffec5 // vmovdqu $-1442(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffffa76356ffec5 // vmovdqu $-1418(%rip), %ymm6 /* LCPI18_3(%rip) */ - JG LBB18_94 - JMP LBB18_122 - -LBB18_83: - CMPL CX, $65535 - JG LBB18_115 - LEAL 1(CX), AX - MOVL AX, 0(R12) - MOVL $0, 4(R12)(CX*4) - JMP LBB18_94 + JG LBB19_4 + JMP LBB19_74 + +LBB19_26: + MOVBLSX AX, AX + CMPL AX, $34 + JE LBB19_34 + +LBB19_15: + CMPL AX, $125 + JNE LBB19_68 + +LBB19_12: + MOVL CX, 0(R12) + MOVQ R14, R13 + TESTL CX, CX + JNE LBB19_6 + JMP LBB19_74 -LBB18_85: - CMPL CX, $65535 - JG LBB18_115 - LEAL 1(CX), AX +LBB19_30: + CMPL DX, $65535 + JG LBB19_73 + LEAL 1(DX), AX MOVL AX, 0(R12) - MOVL $3, 4(R12)(CX*4) - JMP LBB18_94 - -LBB18_87: - MOVL $2, 0(R12)(CX*4) - MOVL -52(BP), AX - CMPL AX, $1 - JE LBB18_91 - TESTL AX, AX - JNE LBB18_92 - MOVQ 0(R11), R15 - MOVQ BX, DI + MOVL $0, 4(R12)(DX*4) + JMP LBB19_4 + +LBB19_34: + MOVL $2, 0(R12)(DX*4) + CMPL -60(BP), $0 + JE LBB19_37 + MOVQ R13, DI + MOVQ BX, SI + LONG $0x000599e8; BYTE $0x00 // callq _validate_string + TESTQ AX, AX + JNS LBB19_39 + JMP LBB19_36 + +LBB19_37: + MOVQ 0(BX), R15 + MOVQ R13, DI MOVQ R15, SI - LEAQ -64(BP), DX - MOVQ R11, BX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0xffef8ee8; BYTE $0xff // callq _advance_string + LEAQ -72(BP), DX + LONG $0xfff36de8; BYTE $0xff // callq _advance_string MOVQ AX, R13 TESTQ AX, AX - JS LBB18_112 + JS LBB19_65 MOVQ R13, 0(BX) TESTQ R15, R15 - MOVQ BX, R11 - MOVQ -48(BP), BX - JG LBB18_92 - JMP LBB18_113 - -LBB18_91: - MOVQ BX, DI - MOVQ R11, SI - MOVQ R11, BX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x000654e8; BYTE $0x00 // callq _validate_string - MOVQ BX, R11 - MOVQ -48(BP), BX - TESTQ AX, AX - JS LBB18_119 + JLE LBB19_20 
-LBB18_92: +LBB19_39: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB18_115 + JG LBB19_73 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $4, 4(R12)(AX*4) - MOVQ $4294977024, R8 - QUAD $0xfffff92a1d6ffec5 // vmovdqu $-1750(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xfffff942256ffec5 // vmovdqu $-1726(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffff95a2d6ffec5 // vmovdqu $-1702(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffff972356ffec5 // vmovdqu $-1678(%rip), %ymm6 /* LCPI18_3(%rip) */ - JMP LBB18_94 - -LBB18_95: - MOVL -52(BP), AX - CMPL AX, $1 - JE LBB18_110 - TESTL AX, AX - JE LBB18_70 - JMP LBB18_94 - -LBB18_97: - MOVQ 0(R11), R13 - MOVQ 0(BX), DI + JMP LBB19_4 + +LBB19_41: + CMPL -60(BP), $0 + JE LBB19_64 + MOVQ -48(BP), DI + MOVQ -56(BP), SI + LONG $0x000529e8; BYTE $0x00 // callq _validate_string + TESTQ AX, AX + JNS LBB19_4 + JMP LBB19_36 + +LBB19_43: + MOVQ -56(BP), BX + MOVQ 0(BX), R13 + MOVQ -48(BP), AX + MOVQ 0(AX), DI ADDQ R13, DI - MOVQ 8(BX), SI + MOVQ 8(AX), SI SUBQ R13, SI - MOVQ R11, BX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x000d0be8; BYTE $0x00 // callq _do_skip_number - MOVQ BX, R11 - MOVQ 0(BX), CX + LONG $0x000c58e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB18_120 - ADDQ AX, CX - MOVQ CX, 0(R11) + JS LBB19_67 + ADDQ AX, 0(BX) TESTQ R13, R13 - MOVQ -48(BP), BX - MOVQ $4294977024, R8 - QUAD $0xfffff8ab1d6ffec5 // vmovdqu $-1877(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xfffff8c3256ffec5 // vmovdqu $-1853(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffff8db2d6ffec5 // vmovdqu $-1829(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffff8f3356ffec5 // vmovdqu $-1805(%rip), %ymm6 /* LCPI18_3(%rip) */ - JG LBB18_94 - JMP LBB18_128 - -LBB18_99: + JG LBB19_4 + JMP LBB19_45 + +LBB19_46: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB18_115 + JG LBB19_73 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $5, 4(R12)(AX*4) - JMP LBB18_94 + JMP LBB19_4 -LBB18_101: - MOVQ 0(R11), AX - MOVQ 8(BX), CX +LBB19_48: + MOVQ -56(BP), BX + MOVQ 0(BX), AX + MOVQ -48(BP), SI + MOVQ 8(SI), CX LEAQ -4(CX), DX CMPQ AX, DX - JA LBB18_125 - MOVQ 0(BX), CX + JA LBB19_72 + MOVQ 0(SI), CX MOVL 0(CX)(AX*1), DX CMPL DX, $1702063201 - JNE LBB18_129 + JNE LBB19_69 LEAQ 4(AX), CX - MOVQ CX, 0(R11) + MOVQ CX, 0(BX) TESTQ AX, AX - JG LBB18_94 - JMP LBB18_118 - -LBB18_104: - MOVQ 0(R11), AX - MOVQ 8(BX), CX + JG LBB19_4 + JMP LBB19_51 + +LBB19_52: + MOVQ -56(BP), BX + MOVQ 0(BX), AX + MOVQ -48(BP), SI + MOVQ 8(SI), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB18_125 - MOVQ 0(BX), CX + JA LBB19_72 + MOVQ 0(SI), CX + LEAQ -1(AX), R13 CMPL -1(CX)(AX*1), $1819047278 - JE LBB18_3 - JMP LBB18_132 - -LBB18_106: - MOVQ 0(R11), AX - MOVQ 8(BX), CX + JE LBB19_3 + JMP LBB19_54 + +LBB19_57: + MOVQ -56(BP), BX + MOVQ 0(BX), AX + MOVQ -48(BP), SI + MOVQ 8(SI), CX LEAQ -3(CX), DX CMPQ AX, DX - JA LBB18_125 - MOVQ 0(BX), CX + JA LBB19_72 + MOVQ 0(SI), CX + LEAQ -1(AX), R13 CMPL -1(CX)(AX*1), $1702195828 - JE LBB18_3 - JMP LBB18_135 + JE LBB19_3 + JMP LBB19_59 -LBB18_108: +LBB19_62: MOVLQSX 0(R12), AX CMPQ AX, $65535 - JG LBB18_115 + JG LBB19_73 LEAL 1(AX), CX MOVL CX, 0(R12) MOVL $6, 4(R12)(AX*4) - JMP LBB18_94 - -LBB18_54: - MOVWLZX CX, AX - SUBQ R9, DI - NOTL AX - BSFL AX, SI - JMP LBB18_50 - -LBB18_110: - MOVQ BX, DI - MOVQ R11, SI - MOVQ R11, BX - WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x000476e8; BYTE $0x00 // callq _validate_string - QUAD $0xfffff7e6356ffec5 // vmovdqu $-2074(%rip), %ymm6 /* LCPI18_3(%rip) */ - QUAD $0xfffff7be2d6ffec5 // vmovdqu $-2114(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffff796256ffec5 // vmovdqu 
$-2154(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffff76e1d6ffec5 // vmovdqu $-2194(%rip), %ymm3 /* LCPI18_0(%rip) */ - MOVQ $4294977024, R8 - MOVQ BX, R11 - MOVQ -48(BP), BX - TESTQ AX, AX - JNS LBB18_94 - JMP LBB18_119 + JMP LBB19_4 -LBB18_55: - NOTQ R9 - ADDQ R9, SI - MOVQ $4294977024, R8 - CMPQ SI, R10 - JB LBB18_52 - JMP LBB18_56 - -LBB18_111: - QUAD $0xfffff7291d6ffec5 // vmovdqu $-2263(%rip), %ymm3 /* LCPI18_0(%rip) */ - QUAD $0xfffff741256ffec5 // vmovdqu $-2239(%rip), %ymm4 /* LCPI18_1(%rip) */ - QUAD $0xfffff7592d6ffec5 // vmovdqu $-2215(%rip), %ymm5 /* LCPI18_2(%rip) */ - QUAD $0xfffff771356ffec5 // vmovdqu $-2191(%rip), %ymm6 /* LCPI18_3(%rip) */ - TESTQ DX, DX - JNE LBB18_42 - JMP LBB18_47 +LBB19_64: + MOVQ -56(BP), BX + MOVQ 0(BX), R15 + MOVQ -48(BP), DI + JMP LBB19_18 -LBB18_115: +LBB19_73: MOVQ $-7, R13 - JMP LBB18_122 + JMP LBB19_74 -LBB18_112: +LBB19_65: MOVQ -48(BP), AX MOVQ 8(AX), AX MOVQ AX, 0(BX) - JMP LBB18_122 + JMP LBB19_74 -LBB18_113: - DECQ R15 +LBB19_20: + ADDQ $-1, R15 MOVQ R15, R13 - JMP LBB18_122 + JMP LBB19_74 -LBB18_114: +LBB19_66: MOVQ 0(BX), CX SUBQ AX, CX ADDQ $-2, CX MOVQ CX, 0(BX) -LBB18_121: +LBB19_68: MOVQ $-2, R13 -LBB18_122: +LBB19_74: MOVQ R13, AX - ADDQ $24, SP - BYTE $0x5b // popq %rbx - WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 - WORD $0x5e41 // popq %r14 - WORD $0x5f41 // popq %r15 - BYTE $0x5d // popq %rbp - WORD $0xf8c5; BYTE $0x77 // vzeroupper + ADDQ $40, SP + BYTE $0x5b // popq %rbx + WORD $0x5c41 // popq %r12 + WORD $0x5d41 // popq %r13 + WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 + BYTE $0x5d // popq %rbp RET -LBB18_125: - MOVQ CX, 0(R11) - JMP LBB18_122 - -LBB18_118: - DECQ AX - -LBB18_119: +LBB19_36: MOVQ AX, R13 - JMP LBB18_122 + JMP LBB19_74 + +LBB19_72: + MOVQ CX, 0(BX) + JMP LBB19_74 -LBB18_120: +LBB19_67: NOTQ AX - ADDQ AX, CX - MOVQ CX, 0(R11) - JMP LBB18_121 + ADDQ AX, 0(BX) + JMP LBB19_68 -LBB18_128: - DECQ R13 - JMP LBB18_122 +LBB19_45: + ADDQ $-1, R13 + JMP LBB19_74 -LBB18_129: +LBB19_69: MOVQ $-2, R13 CMPB DX, $97 - JNE LBB18_122 - INCQ AX + JNE LBB19_74 + ADDQ $1, AX MOVL $1702063201, DX -LBB18_131: +LBB19_71: SHRL $8, DX - MOVQ AX, 0(R11) + MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - INCQ AX + ADDQ $1, AX CMPL DI, SI - JE LBB18_131 - JMP LBB18_122 + JE LBB19_71 + JMP LBB19_74 -LBB18_132: - LEAQ -1(AX), DX - MOVQ DX, 0(R11) - MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $110 - JNE LBB18_122 - MOVL $1819047278, DX +LBB19_59: + MOVQ R13, 0(BX) + CMPB 0(CX)(R13*1), $116 + JNE LBB19_68 + MOVL $1702195828, DX -LBB18_134: +LBB19_61: SHRL $8, DX - MOVQ AX, 0(R11) + MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - INCQ AX + ADDQ $1, AX CMPL DI, SI - JE LBB18_134 - JMP LBB18_122 + JE LBB19_61 + JMP LBB19_68 -LBB18_135: - LEAQ -1(AX), DX - MOVQ DX, 0(R11) - MOVQ $-2, R13 - CMPB -1(CX)(AX*1), $116 - JNE LBB18_122 - MOVL $1702195828, DX +LBB19_51: + ADDQ $-1, AX + MOVQ AX, R13 + JMP LBB19_74 + +LBB19_54: + MOVQ R13, 0(BX) + CMPB 0(CX)(R13*1), $110 + JNE LBB19_68 + MOVL $1819047278, DX -LBB18_137: +LBB19_56: SHRL $8, DX - MOVQ AX, 0(R11) + MOVQ AX, 0(BX) MOVBLSX 0(CX)(AX*1), SI MOVBLZX DX, DI - INCQ AX + ADDQ $1, AX CMPL DI, SI - JE LBB18_137 - JMP LBB18_122 - -// .set L18_0_set_61, LBB18_61-LJTI18_0 -// .set L18_0_set_65, LBB18_65-LJTI18_0 -// .set L18_0_set_68, LBB18_68-LJTI18_0 -// .set L18_0_set_72, LBB18_72-LJTI18_0 -// .set L18_0_set_74, LBB18_74-LJTI18_0 -// .set L18_0_set_76, LBB18_76-LJTI18_0 -LJTI18_0: - LONG $0xfffffa2d // .long L18_0_set_61 - LONG $0xfffffa6b // .long L18_0_set_65 - LONG 
$0xfffffa94 // .long L18_0_set_68 - LONG $0xfffffb0b // .long L18_0_set_72 - LONG $0xfffffb21 // .long L18_0_set_74 - LONG $0xfffffb3a // .long L18_0_set_76 - - // .set L18_1_set_122, LBB18_122-LJTI18_1 - // .set L18_1_set_121, LBB18_121-LJTI18_1 - // .set L18_1_set_95, LBB18_95-LJTI18_1 - // .set L18_1_set_97, LBB18_97-LJTI18_1 - // .set L18_1_set_81, LBB18_81-LJTI18_1 - // .set L18_1_set_99, LBB18_99-LJTI18_1 - // .set L18_1_set_101, LBB18_101-LJTI18_1 - // .set L18_1_set_104, LBB18_104-LJTI18_1 - // .set L18_1_set_106, LBB18_106-LJTI18_1 - // .set L18_1_set_108, LBB18_108-LJTI18_1 -LJTI18_1: - LONG $0xffffff22 // .long L18_1_set_122 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffce6 // .long L18_1_set_95 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffcff // .long L18_1_set_97 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xfffffb76 // .long L18_1_set_81 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // 
.long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffd6b // .long L18_1_set_99 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffd90 // .long L18_1_set_101 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffdcb // .long L18_1_set_104 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffdf5 // .long L18_1_set_106 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xffffff1b // .long L18_1_set_121 - LONG $0xfffffe1f // .long L18_1_set_108 + JE LBB19_56 + JMP LBB19_68 + +// .set L19_0_set_10, LBB19_10-LJTI19_0 +// .set L19_0_set_14, LBB19_14-LJTI19_0 +// .set L19_0_set_16, LBB19_16-LJTI19_0 +// .set L19_0_set_21, LBB19_21-LJTI19_0 +// .set L19_0_set_23, LBB19_23-LJTI19_0 +// .set L19_0_set_26, LBB19_26-LJTI19_0 +LJTI19_0: + LONG $0xfffffbd4 // .long L19_0_set_10 + LONG $0xfffffc03 // .long L19_0_set_14 + LONG $0xfffffc30 // .long L19_0_set_16 + LONG $0xfffffc6f // .long L19_0_set_21 + LONG $0xfffffc84 // .long L19_0_set_23 + LONG $0xfffffcfc // .long L19_0_set_26 + + // .set L19_1_set_74, LBB19_74-LJTI19_1 + // .set L19_1_set_68, LBB19_68-LJTI19_1 + // .set L19_1_set_41, LBB19_41-LJTI19_1 + // .set L19_1_set_43, LBB19_43-LJTI19_1 + // .set L19_1_set_28, LBB19_28-LJTI19_1 + // .set L19_1_set_46, LBB19_46-LJTI19_1 + // .set L19_1_set_48, LBB19_48-LJTI19_1 + // .set L19_1_set_52, LBB19_52-LJTI19_1 + // .set L19_1_set_57, LBB19_57-LJTI19_1 + // .set L19_1_set_62, LBB19_62-LJTI19_1 +LJTI19_1: + LONG $0xffffff30 // .long L19_1_set_74 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG 
$0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffd9c // .long L19_1_set_41 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffdc1 // .long L19_1_set_43 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xfffffc9f // .long L19_1_set_28 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffdf8 // .long L19_1_set_46 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long 
L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffe1d // .long L19_1_set_48 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffe60 // .long L19_1_set_52 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffe96 // .long L19_1_set_57 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xffffff29 // .long L19_1_set_68 + LONG $0xfffffecc // .long L19_1_set_62 _skip_array: BYTE $0x55 // pushq %rbp @@ -6835,18 +6822,18 @@ _skip_string: MOVQ 0(SI), BX LEAQ -32(BP), DX MOVQ BX, SI - LONG $0xffe98be8; BYTE $0xff // callq _advance_string + LONG $0xffee73e8; BYTE $0xff // callq _advance_string TESTQ AX, AX - JS LBB21_2 - DECQ BX + JS LBB22_2 + ADDQ $-1, BX MOVQ AX, CX MOVQ BX, AX - JMP LBB21_3 + JMP LBB22_3 -LBB21_2: +LBB22_2: MOVQ 8(R15), CX -LBB21_3: +LBB22_3: MOVQ CX, 0(R14) ADDQ $8, SP BYTE $0x5b // popq %rbx @@ -6855,15 +6842,15 @@ LBB21_3: BYTE $0x5d // popq %rbp RET -LCPI22_0: +LCPI23_0: QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' QUAD $0x2222222222222222; QUAD $0x2222222222222222 // .space 16, '""""""""""""""""' -LCPI22_1: +LCPI23_1: QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' QUAD $0x5c5c5c5c5c5c5c5c; QUAD $0x5c5c5c5c5c5c5c5c // .space 16, '\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -LCPI22_2: +LCPI23_2: QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' @@ -6875,152 +6862,140 @@ _validate_string: WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx - SUBQ $40, SP - MOVQ SI, R14 - MOVQ 0(SI), R15 - MOVQ 8(DI), R12 - MOVQ R12, -64(BP) - SUBQ R15, R12 - JE LBB22_18 - MOVQ R14, -48(BP) - MOVQ 0(DI), DI - LEAQ 0(DI)(R15*1), SI - CMPQ R12, $64 - MOVQ SI, -72(BP) - JB LBB22_33 - MOVL R12, R9 - ANDL $63, R9 + BYTE $0x50 // pushq %rax + MOVQ 0(SI), R14 + MOVQ 8(DI), R13 + MOVQ R13, -48(BP) + SUBQ R14, R13 + JE LBB23_54 + MOVQ 0(DI), R11 + LEAQ 0(R11)(R14*1), DI MOVQ $-1, AX - XORL R13, R13 - QUAD $0xffffff46056ffec5 // vmovdqu $-186(%rip), %ymm0 /* LCPI22_0(%rip) */ - QUAD $0xffffff5e0d6ffec5 // vmovdqu $-162(%rip), %ymm1 /* LCPI22_1(%rip) */ - QUAD $0xffffff76156ffec5 // vmovdqu $-138(%rip), %ymm2 /* LCPI22_2(%rip) */ + XORL R12, R12 + CMPQ R13, $64 + JB LBB23_2 + QUAD $0xffffff5b056ffec5 // vmovdqu $-165(%rip), %ymm0 /* LCPI23_0(%rip) */ + QUAD $0xffffff730d6ffec5 // vmovdqu $-141(%rip), %ymm1 /* LCPI23_1(%rip) */ + QUAD $0xffffff8b156ffec5 // vmovdqu $-117(%rip), %ymm2 /* LCPI23_2(%rip) */ LONG $0xdb76e5c5 // vpcmpeqd %ymm3, %ymm3, %ymm3 + MOVQ R14, R15 + +LBB23_4: + LONG $0x6f7e81c4; WORD $0x3b24 // vmovdqu (%r11,%r15), %ymm4 + LONG $0x6f7e81c4; WORD $0x3b6c; BYTE $0x20 
// vmovdqu $32(%r11,%r15), %ymm5 + LONG $0xf074ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm6 + LONG $0xded7fdc5 // vpmovmskb %ymm6, %ebx + LONG $0xf074d5c5 // vpcmpeqb %ymm0, %ymm5, %ymm6 + LONG $0xced77dc5 // vpmovmskb %ymm6, %r9d + LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 + LONG $0xced7fdc5 // vpmovmskb %ymm6, %ecx + LONG $0xf174d5c5 // vpcmpeqb %ymm1, %ymm5, %ymm6 + LONG $0xd6d77dc5 // vpmovmskb %ymm6, %r10d + LONG $0xf564edc5 // vpcmpgtb %ymm5, %ymm2, %ymm6 + LONG $0xeb64d5c5 // vpcmpgtb %ymm3, %ymm5, %ymm5 + LONG $0xeddbcdc5 // vpand %ymm5, %ymm6, %ymm5 + LONG $0xc5d77dc5 // vpmovmskb %ymm5, %r8d + SHLQ $32, R9 + ORQ R9, BX + SHLQ $32, R10 + SHLQ $32, R8 + ORQ R10, CX + JNE LBB23_5 + TESTQ R12, R12 + JNE LBB23_7 + XORL R12, R12 -LBB22_3: - LONG $0x266ffec5 // vmovdqu (%rsi), %ymm4 - LONG $0x6e6ffec5; BYTE $0x20 // vmovdqu $32(%rsi), %ymm5 - LONG $0xf074ddc5 // vpcmpeqb %ymm0, %ymm4, %ymm6 - LONG $0xc6d77dc5 // vpmovmskb %ymm6, %r8d - LONG $0xf074d5c5 // vpcmpeqb %ymm0, %ymm5, %ymm6 - LONG $0xded7fdc5 // vpmovmskb %ymm6, %ebx - LONG $0xf174ddc5 // vpcmpeqb %ymm1, %ymm4, %ymm6 - LONG $0xded77dc5 // vpmovmskb %ymm6, %r11d - LONG $0xf174d5c5 // vpcmpeqb %ymm1, %ymm5, %ymm6 - LONG $0xf6d77dc5 // vpmovmskb %ymm6, %r14d - LONG $0xf464edc5 // vpcmpgtb %ymm4, %ymm2, %ymm6 - LONG $0xe364ddc5 // vpcmpgtb %ymm3, %ymm4, %ymm4 - LONG $0xe6dbddc5 // vpand %ymm6, %ymm4, %ymm4 - LONG $0xd4d77dc5 // vpmovmskb %ymm4, %r10d - LONG $0xe564edc5 // vpcmpgtb %ymm5, %ymm2, %ymm4 - LONG $0xeb64d5c5 // vpcmpgtb %ymm3, %ymm5, %ymm5 - LONG $0xe4dbd5c5 // vpand %ymm4, %ymm5, %ymm4 - LONG $0xd4d7fdc5 // vpmovmskb %ymm4, %edx - SHLQ $32, BX - SHLQ $32, R14 - ORQ R14, R11 - CMPQ AX, $-1 - JNE LBB22_5 - TESTQ R11, R11 - JNE LBB22_10 - -LBB22_5: - SHLQ $32, DX - ORQ R8, BX - MOVQ R11, CX - ORQ R13, CX - JNE LBB22_9 - ORQ R10, DX +LBB23_10: + LONG $0xec64edc5 // vpcmpgtb %ymm4, %ymm2, %ymm5 + LONG $0xe364ddc5 // vpcmpgtb %ymm3, %ymm4, %ymm4 + LONG $0xe4dbd5c5 // vpand %ymm4, %ymm5, %ymm4 + LONG $0xccd7fdc5 // vpmovmskb %ymm4, %ecx + ORQ CX, R8 TESTQ BX, BX - JNE LBB22_11 + JNE LBB23_11 + TESTQ R8, R8 + JNE LBB23_15 + ADDQ $-64, R13 + ADDQ $64, R15 + CMPQ R13, $63 + JA LBB23_4 + JMP LBB23_18 + +LBB23_5: + CMPQ AX, $-1 + JNE LBB23_7 + BSFQ CX, AX + ADDQ R15, AX -LBB22_7: - TESTQ DX, DX - JNE LBB22_21 - ADDQ $64, SI - ADDQ $-64, R12 - CMPQ R12, $63 - JA LBB22_3 - JMP LBB22_23 - -LBB22_9: - MOVQ R13, R14 - NOTQ R14 - ANDQ R11, R14 - LEAQ 0(R14)(R14*1), R8 - ORQ R13, R8 - MOVQ R8, -56(BP) - NOTQ R8 - ANDQ R11, R8 +LBB23_7: + MOVQ R12, R9 + NOTQ R9 + ANDQ CX, R9 + LEAQ 0(R9)(R9*1), DX + ORQ R12, DX + MOVQ DX, R10 + NOTQ R10 + ANDQ CX, R10 MOVQ $-6148914691236517206, CX - ANDQ CX, R8 - XORL R13, R13 - ADDQ R14, R8 - SETCS R13 - ADDQ R8, R8 + ANDQ CX, R10 + XORL R12, R12 + ADDQ R9, R10 + SETCS R12 + ADDQ R10, R10 MOVQ $6148914691236517205, CX - XORQ CX, R8 - ANDQ -56(BP), R8 - NOTQ R8 - ANDQ R8, BX - ORQ R10, DX - TESTQ BX, BX - JE LBB22_7 - JMP LBB22_11 - -LBB22_10: - MOVQ SI, R14 - SUBQ DI, R14 - BSFQ R11, AX - ADDQ R14, AX - JMP LBB22_5 + XORQ CX, R10 + ANDQ DX, R10 + NOTQ R10 + ANDQ R10, BX + JMP LBB23_10 -LBB22_11: - SUBQ DI, SI - BSFQ BX, BX - LEAQ 1(SI)(BX*1), R12 - TESTQ DX, DX - MOVQ -48(BP), R14 - JE LBB22_13 - BSFQ DX, CX - CMPQ CX, BX - JBE LBB22_29 +LBB23_11: + BSFQ BX, CX + LEAQ 0(CX)(R15*1), BX + ADDQ $1, BX + TESTQ R8, R8 + JE LBB23_52 + BSFQ R8, DX + CMPQ DX, CX + JBE LBB23_13 -LBB22_13: - TESTQ R12, R12 - JS LBB22_17 - MOVQ R15, SI +LBB23_52: + TESTQ BX, BX + JS LBB23_53 + MOVQ SI, R15 + MOVQ R14, SI NOTQ SI - 
ADDQ R12, SI - MOVQ -72(BP), DI + ADDQ BX, SI WORD $0xf8c5; BYTE $0x77 // vzeroupper - LONG $0x00026ee8; BYTE $0x00 // callq _utf8_validate + LONG $0x000280e8; BYTE $0x00 // callq _utf8_validate TESTQ AX, AX - JS LBB22_19 - ADDQ 0(R14), AX + JS LBB23_56 + MOVQ R15, SI + ADDQ 0(R15), AX -LBB22_16: - MOVQ $-2, R12 - JMP LBB22_20 +LBB23_58: + MOVQ $-2, BX + JMP LBB23_59 -LBB22_17: - CMPQ R12, $-1 - JNE LBB22_20 +LBB23_53: + CMPQ BX, $-1 + JNE LBB23_59 -LBB22_18: - MOVQ $-1, R12 - MOVQ -64(BP), AX - JMP LBB22_20 +LBB23_54: + MOVQ $-1, BX + MOVQ -48(BP), AX + JMP LBB23_59 -LBB22_19: - DECQ R15 - MOVQ R12, AX - MOVQ R15, R12 +LBB23_56: + ADDQ $-1, R14 + MOVQ BX, AX + MOVQ R14, BX + MOVQ R15, SI -LBB22_20: - MOVQ AX, 0(R14) - MOVQ R12, AX - ADDQ $40, SP +LBB23_59: + MOVQ AX, 0(SI) + MOVQ BX, AX + ADDQ $8, SP BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 WORD $0x5d41 // popq %r13 @@ -7030,195 +7005,196 @@ LBB22_20: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB22_21: - MOVQ $-2, R12 +LBB23_15: + MOVQ $-2, BX CMPQ AX, $-1 - JE LBB22_30 - -LBB22_22: - MOVQ -48(BP), R14 - JMP LBB22_20 + JNE LBB23_59 + BSFQ R8, AX + ADDQ R15, AX + JMP LBB23_59 -LBB22_23: - MOVQ R9, R12 - CMPQ R12, $32 - JB LBB22_39 +LBB23_18: + ADDQ R11, R15 + CMPQ R13, $32 + JB LBB23_34 -LBB22_24: - LONG $0x066ffec5 // vmovdqu (%rsi), %ymm0 - QUAD $0xfffffd7c0d74fdc5 // vpcmpeqb $-644(%rip), %ymm0, %ymm1 /* LCPI22_0(%rip) */ - LONG $0xd9d7fdc5 // vpmovmskb %ymm1, %ebx - QUAD $0xfffffd900d74fdc5 // vpcmpeqb $-624(%rip), %ymm0, %ymm1 /* LCPI22_1(%rip) */ - LONG $0xd1d7fdc5 // vpmovmskb %ymm1, %edx - QUAD $0xfffffda40d6ffec5 // vmovdqu $-604(%rip), %ymm1 /* LCPI22_2(%rip) */ - LONG $0xc864f5c5 // vpcmpgtb %ymm0, %ymm1, %ymm1 - LONG $0xd276edc5 // vpcmpeqd %ymm2, %ymm2, %ymm2 - LONG $0xc264fdc5 // vpcmpgtb %ymm2, %ymm0, %ymm0 - LONG $0xc1dbfdc5 // vpand %ymm1, %ymm0, %ymm0 - LONG $0xc0d77dc5 // vpmovmskb %ymm0, %r8d - TESTL DX, DX - JNE LBB22_34 - TESTQ R13, R13 - JNE LBB22_36 - XORL R13, R13 +LBB23_20: + LONG $0x6f7ec1c4; BYTE $0x07 // vmovdqu (%r15), %ymm0 + QUAD $0xfffffda50d74fdc5 // vpcmpeqb $-603(%rip), %ymm0, %ymm1 /* LCPI23_0(%rip) */ + LONG $0xd9d7fdc5 // vpmovmskb %ymm1, %ebx + QUAD $0xfffffdb90d74fdc5 // vpcmpeqb $-583(%rip), %ymm0, %ymm1 /* LCPI23_1(%rip) */ + LONG $0xc9d7fdc5 // vpmovmskb %ymm1, %ecx + QUAD $0xfffffdcd0d6ffec5 // vmovdqu $-563(%rip), %ymm1 /* LCPI23_2(%rip) */ + LONG $0xc864f5c5 // vpcmpgtb %ymm0, %ymm1, %ymm1 + LONG $0xd276edc5 // vpcmpeqd %ymm2, %ymm2, %ymm2 + LONG $0xc264fdc5 // vpcmpgtb %ymm2, %ymm0, %ymm0 + LONG $0xc0dbf5c5 // vpand %ymm0, %ymm1, %ymm0 + LONG $0xc8d77dc5 // vpmovmskb %ymm0, %r9d + TESTL CX, CX + JNE LBB23_21 + TESTQ R12, R12 + JNE LBB23_24 + XORL R12, R12 TESTQ BX, BX - JE LBB22_37 + JE LBB23_30 -LBB22_27: - SUBQ DI, SI +LBB23_27: + SUBQ R11, R15 BSFQ BX, DX - LEAQ 1(SI)(DX*1), R12 - TESTL R8, R8 - JE LBB22_32 - BSFQ R8, CX + LEAQ 0(R15)(DX*1), BX + ADDQ $1, BX + TESTQ R9, R9 + JE LBB23_52 + BSFQ R9, CX CMPQ CX, DX - MOVQ -48(BP), R14 - JA LBB22_13 + JA LBB23_52 + ADDQ R15, CX + CMPQ AX, $-1 + LONG $0xc1440f48 // cmoveq %rcx, %rax + JMP LBB23_58 -LBB22_29: - ADDQ SI, CX - CMPQ AX, $-1 - LONG $0xc1440f48 // cmoveq %rcx, %rax - JMP LBB22_16 +LBB23_2: + MOVQ DI, R15 + CMPQ R13, $32 + JAE LBB23_20 + JMP LBB23_34 -LBB22_30: - SUBQ DI, SI - BSFQ DX, AX +LBB23_13: + ADDQ R15, DX + CMPQ AX, $-1 + LONG $0xc2440f48 // cmoveq %rdx, %rax + JMP LBB23_58 -LBB22_31: +LBB23_21: + MOVQ SI, R8 + CMPQ AX, $-1 + JNE LBB23_25 + MOVQ R15, SI + SUBQ R11, SI + BSFQ CX, AX ADDQ SI, AX - MOVQ -48(BP), R14 - 
JMP LBB22_20 - -LBB22_32: - MOVQ -48(BP), R14 - JMP LBB22_13 - -LBB22_33: - MOVQ $-1, AX - XORL R13, R13 - CMPQ R12, $32 - JAE LBB22_24 - JMP LBB22_39 + JMP LBB23_25 -LBB22_34: - CMPQ AX, $-1 - JNE LBB22_36 - MOVQ SI, CX - SUBQ DI, CX - BSFQ DX, AX - ADDQ CX, AX +LBB23_24: + MOVQ SI, R8 -LBB22_36: - MOVL R13, R10 +LBB23_25: + MOVL R12, R10 NOTL R10 - ANDL DX, R10 - LEAL 0(R10)(R10*1), R9 - ORL R13, R9 - MOVL R9, CX - NOTL CX - ANDL DX, CX - ANDL $-1431655766, CX - XORL R13, R13 - ADDL R10, CX - SETCS R13 - ADDL CX, CX - XORL $1431655765, CX - ANDL R9, CX - NOTL CX - ANDL CX, BX + ANDL CX, R10 + LEAL 0(R10)(R10*1), SI + LEAL 0(R12)(R10*2), DX + NOTL SI + ANDL CX, SI + ANDL $-1431655766, SI + XORL R12, R12 + ADDL R10, SI + SETCS R12 + ADDL SI, SI + XORL $1431655765, SI + ANDL DX, SI + NOTL SI + ANDL SI, BX + MOVQ R8, SI TESTQ BX, BX - JNE LBB22_27 + JNE LBB23_27 -LBB22_37: - TESTL R8, R8 - JNE LBB22_51 - ADDQ $32, SI - ADDQ $-32, R12 +LBB23_30: + TESTQ R9, R9 + JNE LBB23_31 + ADDQ $32, R15 + ADDQ $-32, R13 -LBB22_39: - TESTQ R13, R13 - JNE LBB22_53 - MOVQ -48(BP), R14 +LBB23_34: + MOVQ R11, DX + NOTQ DX TESTQ R12, R12 - JE LBB22_50 + JNE LBB23_35 + TESTQ R13, R13 + JE LBB23_51 -LBB22_41: - MOVQ DI, R8 - NOTQ R8 +LBB23_38: + LEAQ 1(DX), R9 + +LBB23_39: + XORL CX, CX -LBB22_42: - LEAQ 1(SI), DX - MOVBLZX 0(SI), BX +LBB23_40: + MOVBLZX 0(R15)(CX*1), BX CMPB BX, $34 - JE LBB22_49 - LEAQ -1(R12), R10 + JE LBB23_50 CMPB BX, $92 - JE LBB22_46 + JE LBB23_42 CMPB BX, $31 - JBE LBB22_55 - MOVQ DX, SI - MOVQ R10, R12 - TESTQ R10, R10 - JNE LBB22_42 - JMP LBB22_48 - -LBB22_46: - TESTQ R10, R10 - JE LBB22_18 - ADDQ R8, DX - CMPQ AX, $-1 - LONG $0xc2440f48 // cmoveq %rdx, %rax - ADDQ $2, SI - ADDQ $-2, R12 - MOVQ R12, R10 - TESTQ R10, R10 - JNE LBB22_42 + JBE LBB23_45 + ADDQ $1, CX + CMPQ R13, CX + JNE LBB23_40 + JMP LBB23_48 + +LBB23_42: + LEAQ -1(R13), BX + CMPQ BX, CX + JE LBB23_54 + LEAQ 0(R9)(R15*1), BX + ADDQ CX, BX + CMPQ AX, $-1 + LONG $0xc3440f48 // cmoveq %rbx, %rax + ADDQ CX, R15 + ADDQ $2, R15 + MOVQ R13, BX + SUBQ CX, BX + ADDQ $-2, BX + ADDQ $-2, R13 + CMPQ R13, CX + MOVQ BX, R13 + JNE LBB23_39 + JMP LBB23_54 -LBB22_48: - CMPB BX, $34 - JNE LBB22_18 - JMP LBB22_50 +LBB23_50: + ADDQ CX, R15 + ADDQ $1, R15 -LBB22_49: - MOVQ DX, SI +LBB23_51: + SUBQ R11, R15 + MOVQ R15, BX + JMP LBB23_52 -LBB22_50: - SUBQ DI, SI - MOVQ SI, R12 - JMP LBB22_13 +LBB23_48: + CMPB BX, $34 + JNE LBB23_54 + ADDQ R13, R15 + JMP LBB23_51 -LBB22_51: - MOVQ $-2, R12 +LBB23_31: + MOVQ $-2, BX CMPQ AX, $-1 - JNE LBB22_22 - SUBQ DI, SI - BSFQ R8, AX - JMP LBB22_31 + JNE LBB23_59 + SUBQ R11, R15 + BSFQ R9, AX + ADDQ R15, AX + JMP LBB23_59 -LBB22_53: - TESTQ R12, R12 - MOVQ -48(BP), R14 - JE LBB22_18 - MOVQ DI, CX - NOTQ CX - ADDQ SI, CX +LBB23_35: + TESTQ R13, R13 + JE LBB23_54 + LEAQ 0(R15)(DX*1), CX CMPQ AX, $-1 - LONG $0xc1440f48 // cmoveq %rcx, %rax - INCQ SI - DECQ R12 - TESTQ R12, R12 - JNE LBB22_41 - JMP LBB22_50 + LONG $0xc1440f48 // cmoveq %rcx, %rax + ADDQ $1, R15 + ADDQ $-1, R13 + TESTQ R13, R13 + JNE LBB23_38 + JMP LBB23_51 -LBB22_55: - MOVQ $-2, R12 +LBB23_45: + MOVQ $-2, BX CMPQ AX, $-1 - JNE LBB22_22 - ADDQ R8, DX - MOVQ DX, AX - MOVQ -48(BP), R14 - JMP LBB22_20 + JNE LBB23_59 + ADDQ R15, DX + LEAQ 0(CX)(DX*1), AX + ADDQ $1, AX + JMP LBB23_59 _utf8_validate: BYTE $0x55 // pushq %rbp @@ -7228,143 +7204,151 @@ _utf8_validate: BYTE $0x53 // pushq %rbx MOVQ $-1, AX TESTQ SI, SI - JLE LBB23_33 - LONG $0xd60d8d4c; WORD $0x00af; BYTE $0x00 // leaq $45014(%rip), %r9 /* _first(%rip) */ - LONG $0xcf058d4c; WORD $0x00b0; 
BYTE $0x00 // leaq $45263(%rip), %r8 /* _ranges(%rip) */ - LONG $0x4e158d4c; WORD $0x0001; BYTE $0x00 // leaq $334(%rip), %r10 /* LJTI23_0(%rip) */ + JLE LBB24_35 + LONG $0xda058d4c; WORD $0x00b0; BYTE $0x00 // leaq $45274(%rip), %r8 /* _first(%rip) */ + LONG $0xd30d8d4c; WORD $0x00b1; BYTE $0x00 // leaq $45523(%rip), %r9 /* _ranges(%rip) */ + LONG $0x84158d4c; WORD $0x0001; BYTE $0x00 // leaq $388(%rip), %r10 /* LJTI24_0(%rip) */ MOVQ DI, R11 -LBB23_2: +LBB24_2: CMPB 0(R11), $0 - JS LBB23_3 - MOVQ SI, DX - MOVQ R11, CX + JS LBB24_3 + MOVQ SI, R14 + MOVQ R11, DX CMPQ SI, $32 - JL LBB23_10 - MOVQ R11, CX - MOVQ SI, DX + JL LBB24_16 + XORL BX, BX + XORL CX, CX -LBB23_6: - LONG $0x016ffec5 // vmovdqu (%rcx), %ymm0 - LONG $0xd8d7fdc5 // vpmovmskb %ymm0, %ebx - TESTL BX, BX - JNE LBB23_7 - ADDQ $32, CX +LBB24_6: + LONG $0x6f7ec1c4; WORD $0x1b04 // vmovdqu (%r11,%rbx), %ymm0 + LONG $0xd0d7fdc5 // vpmovmskb %ymm0, %edx + TESTL DX, DX + JNE LBB24_7 + ADDQ $32, BX + LEAQ 0(SI)(CX*1), DX + ADDQ $-32, DX + ADDQ $-32, CX + ADDQ $32, DX CMPQ DX, $63 - LEAQ -32(DX), DX - JG LBB23_6 + JG LBB24_6 + MOVQ R11, DX + SUBQ CX, DX + MOVQ SI, R14 + SUBQ BX, R14 -LBB23_10: +LBB24_16: WORD $0xf8c5; BYTE $0x77 // vzeroupper - CMPQ DX, $16 - JL LBB23_11 - -LBB23_16: - LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 - LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx - TESTW BX, BX - JNE LBB23_17 - ADDQ $16, CX - CMPQ DX, $31 - LEAQ -16(DX), DX - JG LBB23_16 - -LBB23_11: - TESTQ DX, DX - JLE LBB23_33 - INCQ DX - -LBB23_13: - CMPB 0(CX), $0 - JS LBB23_18 - INCQ CX - DECQ DX - CMPQ DX, $1 - JG LBB23_13 - JMP LBB23_33 - -LBB23_3: - XORL DX, DX - CMPQ DX, $-1 - JNE LBB23_20 - JMP LBB23_33 - -LBB23_18: - SUBQ R11, CX - MOVQ CX, DX - CMPQ DX, $-1 - JE LBB23_33 + CMPQ R14, $16 + JL LBB24_17 + MOVQ R11, CX + SUBQ DX, CX -LBB23_20: - SUBQ DX, SI - JLE LBB23_33 - LEAQ 0(R11)(DX*1), R14 - MOVBLZX 0(R11)(DX*1), R11 - MOVBLZX 0(R11)(R9*1), R15 - MOVL R15, DX - ANDL $7, DX - CMPQ SI, DX - JB LBB23_31 - CMPB DX, $4 - JA LBB23_31 - MOVL $1, BX - MOVBLZX DX, CX - MOVLQSX 0(R10)(CX*4), CX - ADDQ R10, CX - JMP CX +LBB24_23: + LONG $0x026ffac5 // vmovdqu (%rdx), %xmm0 + LONG $0xd8d7f9c5 // vpmovmskb %xmm0, %ebx + TESTL BX, BX + JNE LBB24_24 + LEAQ -16(R14), BX + ADDQ $16, DX + ADDQ $-16, CX + CMPQ R14, $31 + MOVQ BX, R14 + JG LBB24_23 + JMP LBB24_18 -LBB23_24: - MOVB 3(R14), BX - TESTB BX, BX - JNS LBB23_31 - CMPB BX, $-65 - JA LBB23_31 +LBB24_3: + XORL R14, R14 -LBB23_26: - MOVB 2(R14), BX - TESTB BX, BX - JNS LBB23_31 - CMPB BX, $-65 - JA LBB23_31 +LBB24_9: + CMPQ R14, $-1 + JE LBB24_35 + SUBQ R14, SI + JLE LBB24_35 + LEAQ 0(R11)(R14*1), R15 + MOVBLZX 0(R11)(R14*1), R11 + MOVBLZX 0(R11)(R8*1), BX + MOVL BX, CX + ANDL $7, CX + CMPQ SI, CX + JB LBB24_33 + CMPB CX, $4 + JA LBB24_33 + MOVL $1, R14 + MOVBLZX CX, DX + MOVLQSX 0(R10)(DX*4), DX + ADDQ R10, DX + JMP DX -LBB23_28: +LBB24_26: + MOVB 3(R15), DX + TESTB DX, DX + JNS LBB24_33 + CMPB DX, $-65 + JA LBB24_33 + +LBB24_28: + MOVB 2(R15), DX + TESTB DX, DX + JNS LBB24_33 + CMPB DX, $-65 + JA LBB24_33 + +LBB24_30: + SHRQ $4, BX + MOVB 1(R15), DX + CMPB DX, 0(R9)(BX*2) + JB LBB24_33 + CMPB 1(R9)(BX*2), DX + JB LBB24_33 + MOVQ CX, R14 TESTB R11, R11 - JNS LBB23_31 - SHRQ $4, R15 - MOVB 1(R14), R11 - CMPB R11, 0(R8)(R15*2) - JB LBB23_31 - MOVQ DX, BX - CMPB 1(R8)(R15*2), R11 - JB LBB23_31 - -LBB23_32: - ADDQ BX, R14 - MOVQ R14, R11 - SUBQ BX, SI - JG LBB23_2 - JMP LBB23_33 + JNS LBB24_33 -LBB23_7: - MOVLQSX BX, DX - JMP LBB23_8 - -LBB23_17: - MOVWLZX BX, DX +LBB24_34: + ADDQ R14, R15 + MOVQ R15, R11 + 
SUBQ R14, SI + JG LBB24_2 + JMP LBB24_35 -LBB23_8: - SUBQ R11, CX - BSFQ DX, DX - ADDQ CX, DX - CMPQ DX, $-1 - JNE LBB23_20 - JMP LBB23_33 +LBB24_17: + MOVQ R14, BX -LBB23_31: - SUBQ DI, R14 - MOVQ R14, AX +LBB24_18: + TESTQ BX, BX + JLE LBB24_35 + ADDQ $1, BX + MOVQ DX, R14 + SUBQ R11, R14 + +LBB24_20: + CMPB 0(DX), $0 + JS LBB24_9 + ADDQ $1, DX + ADDQ $-1, BX + ADDQ $1, R14 + CMPQ BX, $1 + JG LBB24_20 + JMP LBB24_35 + +LBB24_7: + BSFL DX, R14 + JMP LBB24_8 + +LBB24_24: + BSFW BX, DX + MOVWLZX DX, R14 + +LBB24_8: + SUBQ CX, R14 + JMP LBB24_9 + +LBB24_33: + SUBQ DI, R15 + MOVQ R15, AX -LBB23_33: +LBB24_35: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 @@ -7372,17 +7356,17 @@ LBB23_33: WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -// .set L23_0_set_32, LBB23_32-LJTI23_0 -// .set L23_0_set_31, LBB23_31-LJTI23_0 -// .set L23_0_set_28, LBB23_28-LJTI23_0 -// .set L23_0_set_26, LBB23_26-LJTI23_0 -// .set L23_0_set_24, LBB23_24-LJTI23_0 -LJTI23_0: - LONG $0xffffffc1 // .long L23_0_set_32 - LONG $0xfffffff0 // .long L23_0_set_31 - LONG $0xffffffa4 // .long L23_0_set_28 - LONG $0xffffff97 // .long L23_0_set_26 - LONG $0xffffff8a // .long L23_0_set_24 +// .set L24_0_set_34, LBB24_34-LJTI24_0 +// .set L24_0_set_33, LBB24_33-LJTI24_0 +// .set L24_0_set_30, LBB24_30-LJTI24_0 +// .set L24_0_set_28, LBB24_28-LJTI24_0 +// .set L24_0_set_26, LBB24_26-LJTI24_0 +LJTI24_0: + LONG $0xffffff9a // .long L24_0_set_34 + LONG $0xfffffff0 // .long L24_0_set_33 + LONG $0xffffff7d // .long L24_0_set_30 + LONG $0xffffff70 // .long L24_0_set_28 + LONG $0xffffff5b // .long L24_0_set_26 _skip_negative: BYTE $0x55 // pushq %rbp @@ -7396,534 +7380,490 @@ _skip_negative: MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0x00017be8; BYTE $0x00 // callq _do_skip_number - MOVQ 0(R14), CX + LONG $0x000173e8; BYTE $0x00 // callq _do_skip_number TESTQ AX, AX - JS LBB24_1 - ADDQ AX, CX - MOVQ CX, 0(R14) - DECQ BX - JMP LBB24_3 + JS LBB25_1 + ADDQ AX, 0(R14) + ADDQ $-1, BX + JMP LBB25_3 -LBB24_1: +LBB25_1: NOTQ AX - ADDQ AX, CX - MOVQ CX, 0(R14) + ADDQ AX, 0(R14) MOVQ $-2, BX -LBB24_3: +LBB25_3: MOVQ BX, AX BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 BYTE $0x5d // popq %rbp RET -LCPI25_0: +LCPI26_0: QUAD $0x2f2f2f2f2f2f2f2f; QUAD $0x2f2f2f2f2f2f2f2f // .space 16, '////////////////' QUAD $0x2f2f2f2f2f2f2f2f; QUAD $0x2f2f2f2f2f2f2f2f // .space 16, '////////////////' -LCPI25_1: +LCPI26_1: QUAD $0x3a3a3a3a3a3a3a3a; QUAD $0x3a3a3a3a3a3a3a3a // .space 16, '::::::::::::::::' QUAD $0x3a3a3a3a3a3a3a3a; QUAD $0x3a3a3a3a3a3a3a3a // .space 16, '::::::::::::::::' -LCPI25_2: +LCPI26_2: QUAD $0x2b2b2b2b2b2b2b2b; QUAD $0x2b2b2b2b2b2b2b2b // .space 16, '++++++++++++++++' QUAD $0x2b2b2b2b2b2b2b2b; QUAD $0x2b2b2b2b2b2b2b2b // .space 16, '++++++++++++++++' -LCPI25_3: +LCPI26_3: QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' -LCPI25_4: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' +LCPI26_4: + QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' + QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' -LCPI25_5: +LCPI26_5: QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' 
QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' -LCPI25_6: - QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' - QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' +LCPI26_6: + QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' + QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' -LCPI25_7: +LCPI26_7: QUAD $0x2f2f2f2f2f2f2f2f; QUAD $0x2f2f2f2f2f2f2f2f // .space 16, '////////////////' -LCPI25_8: +LCPI26_8: QUAD $0x3a3a3a3a3a3a3a3a; QUAD $0x3a3a3a3a3a3a3a3a // .space 16, '::::::::::::::::' -LCPI25_9: +LCPI26_9: QUAD $0x2b2b2b2b2b2b2b2b; QUAD $0x2b2b2b2b2b2b2b2b // .space 16, '++++++++++++++++' -LCPI25_10: +LCPI26_10: QUAD $0x2d2d2d2d2d2d2d2d; QUAD $0x2d2d2d2d2d2d2d2d // .space 16, '----------------' -LCPI25_11: - QUAD $0x2020202020202020; QUAD $0x2020202020202020 // .space 16, ' ' +LCPI26_11: + QUAD $0xdfdfdfdfdfdfdfdf; QUAD $0xdfdfdfdfdfdfdfdf // .space 16, '\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf\xdf' -LCPI25_12: +LCPI26_12: QUAD $0x2e2e2e2e2e2e2e2e; QUAD $0x2e2e2e2e2e2e2e2e // .space 16, '................' -LCPI25_13: - QUAD $0x6565656565656565; QUAD $0x6565656565656565 // .space 16, 'eeeeeeeeeeeeeeee' +LCPI26_13: + QUAD $0x4545454545454545; QUAD $0x4545454545454545 // .space 16, 'EEEEEEEEEEEEEEEE' _do_skip_number: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5741 // pushq %r15 WORD $0x5641 // pushq %r14 - WORD $0x5541 // pushq %r13 WORD $0x5441 // pushq %r12 BYTE $0x53 // pushq %rbx TESTQ SI, SI - JE LBB25_53 + JE LBB26_1 CMPB 0(DI), $48 - JNE LBB25_5 - MOVL $1, DX + JNE LBB26_6 + MOVL $1, AX CMPQ SI, $1 - JE LBB25_73 - MOVB 1(DI), AX - ADDB $-46, AX - CMPB AX, $55 - JA LBB25_73 - MOVBLZX AX, AX - MOVQ $36028797027352577, CX - BTQ AX, CX - JAE LBB25_73 + JE LBB26_81 + MOVB 1(DI), CX + ADDB $-46, CX + CMPB CX, $55 + JA LBB26_81 + MOVBLZX CX, CX + MOVQ $36028797027352577, DX + BTQ CX, DX + JAE LBB26_81 -LBB25_5: - CMPQ SI, $32 - JB LBB25_76 - LEAQ -32(SI), R11 - MOVQ R11, AX - ANDQ $-32, AX - LEAQ 32(AX)(DI*1), R10 - ANDL $31, R11 +LBB26_6: MOVQ $-1, R9 - QUAD $0xfffffe35056ffec5 // vmovdqu $-459(%rip), %ymm0 /* LCPI25_0(%rip) */ - QUAD $0xfffffe4d0d6ffec5 // vmovdqu $-435(%rip), %ymm1 /* LCPI25_1(%rip) */ - QUAD $0xfffffe65156ffec5 // vmovdqu $-411(%rip), %ymm2 /* LCPI25_2(%rip) */ - QUAD $0xfffffe7d1d6ffec5 // vmovdqu $-387(%rip), %ymm3 /* LCPI25_3(%rip) */ - QUAD $0xfffffe95256ffec5 // vmovdqu $-363(%rip), %ymm4 /* LCPI25_4(%rip) */ - QUAD $0xfffffead2d6ffec5 // vmovdqu $-339(%rip), %ymm5 /* LCPI25_5(%rip) */ - QUAD $0xfffffec5356ffec5 // vmovdqu $-315(%rip), %ymm6 /* LCPI25_6(%rip) */ - MOVQ $-1, AX + CMPQ SI, $32 + JB LBB26_7 + XORL AX, AX + QUAD $0xfffffe47056ffec5 // vmovdqu $-441(%rip), %ymm0 /* LCPI26_0(%rip) */ + QUAD $0xfffffe5f0d6ffec5 // vmovdqu $-417(%rip), %ymm1 /* LCPI26_1(%rip) */ + QUAD $0xfffffe77156ffec5 // vmovdqu $-393(%rip), %ymm2 /* LCPI26_2(%rip) */ + QUAD $0xfffffe8f1d6ffec5 // vmovdqu $-369(%rip), %ymm3 /* LCPI26_3(%rip) */ + QUAD $0xfffffea7256ffec5 // vmovdqu $-345(%rip), %ymm4 /* LCPI26_4(%rip) */ + QUAD $0xfffffebf2d6ffec5 // vmovdqu $-321(%rip), %ymm5 /* LCPI26_5(%rip) */ + QUAD $0xfffffed7356ffec5 // vmovdqu $-297(%rip), %ymm6 /* LCPI26_6(%rip) */ MOVQ $-1, R8 - MOVQ DI, R14 + MOVQ $-1, R10 -LBB25_7: - LONG $0x6f7ec1c4; BYTE $0x3e // vmovdqu (%r14), %ymm7 +LBB26_9: + LONG $0x3c6ffec5; BYTE $0x07 // vmovdqu (%rdi,%rax), %ymm7 LONG 
$0xc06445c5 // vpcmpgtb %ymm0, %ymm7, %ymm8 LONG $0xcf6475c5 // vpcmpgtb %ymm7, %ymm1, %ymm9 - LONG $0xdb3d41c4; BYTE $0xc1 // vpand %ymm9, %ymm8, %ymm8 + LONG $0xdb3541c4; BYTE $0xc0 // vpand %ymm8, %ymm9, %ymm8 LONG $0xca7445c5 // vpcmpeqb %ymm2, %ymm7, %ymm9 LONG $0xd37445c5 // vpcmpeqb %ymm3, %ymm7, %ymm10 LONG $0xeb2d41c4; BYTE $0xc9 // vpor %ymm9, %ymm10, %ymm9 - LONG $0xd4eb45c5 // vpor %ymm4, %ymm7, %ymm10 + LONG $0xd4db45c5 // vpand %ymm4, %ymm7, %ymm10 LONG $0xd6742dc5 // vpcmpeqb %ymm6, %ymm10, %ymm10 LONG $0xfd74c5c5 // vpcmpeqb %ymm5, %ymm7, %ymm7 LONG $0xd7d7fdc5 // vpmovmskb %ymm7, %edx - LONG $0xd77d41c4; BYTE $0xe2 // vpmovmskb %ymm10, %r12d - LONG $0xd77d41c4; BYTE $0xf9 // vpmovmskb %ymm9, %r15d - LONG $0xffebadc5 // vpor %ymm7, %ymm10, %ymm7 - LONG $0xeb3541c4; BYTE $0xc0 // vpor %ymm8, %ymm9, %ymm8 + LONG $0xd77d41c4; BYTE $0xfa // vpmovmskb %ymm10, %r15d + LONG $0xd77d41c4; BYTE $0xd9 // vpmovmskb %ymm9, %r11d + LONG $0xffebbdc5 // vpor %ymm7, %ymm8, %ymm7 + LONG $0xeb2d41c4; BYTE $0xc1 // vpor %ymm9, %ymm10, %ymm8 LONG $0xffebbdc5 // vpor %ymm7, %ymm8, %ymm7 LONG $0xcfd7fdc5 // vpmovmskb %ymm7, %ecx NOTQ CX - BSFQ CX, CX - CMPL CX, $32 - JE LBB25_9 + BSFQ CX, R14 + CMPL R14, $32 + JE LBB26_11 MOVL $-1, BX + MOVL R14, CX SHLL CX, BX NOTL BX - ANDL BX, DX - ANDL BX, R12 - ANDL R15, BX - MOVL BX, R15 + ANDL BX, DX + ANDL BX, R15 + ANDL R11, BX + MOVL BX, R11 -LBB25_9: - LEAL -1(DX), BX - ANDL DX, BX - JNE LBB25_70 - LEAL -1(R12), BX - ANDL R12, BX - JNE LBB25_70 - LEAL -1(R15), BX - ANDL R15, BX - JNE LBB25_70 +LBB26_11: + LEAL -1(DX), CX + ANDL DX, CX + JNE LBB26_12 + LEAL -1(R15), CX + ANDL R15, CX + JNE LBB26_12 + LEAL -1(R11), CX + ANDL R11, CX + JNE LBB26_12 TESTL DX, DX - JE LBB25_15 - MOVQ R14, BX - SUBQ DI, BX - BSFL DX, DX - ADDQ BX, DX - CMPQ R8, $-1 - JNE LBB25_72 - MOVQ DX, R8 - -LBB25_15: - TESTL R12, R12 - JE LBB25_18 - MOVQ R14, BX - SUBQ DI, BX - BSFL R12, DX - ADDQ BX, DX - CMPQ AX, $-1 - JNE LBB25_72 - MOVQ DX, AX + JE LBB26_19 + BSFL DX, CX + CMPQ R10, $-1 + JNE LBB26_82 + ADDQ AX, CX + MOVQ CX, R10 -LBB25_18: +LBB26_19: TESTL R15, R15 - JE LBB25_21 - MOVQ R14, BX - SUBQ DI, BX - BSFL R15, DX - ADDQ BX, DX + JE LBB26_22 + BSFL R15, CX + CMPQ R8, $-1 + JNE LBB26_82 + ADDQ AX, CX + MOVQ CX, R8 + +LBB26_22: + TESTL R11, R11 + JE LBB26_25 + BSFL R11, CX CMPQ R9, $-1 - JNE LBB25_72 - MOVQ DX, R9 + JNE LBB26_82 + ADDQ AX, CX + MOVQ CX, R9 -LBB25_21: - CMPL CX, $32 - JNE LBB25_54 - ADDQ $32, R14 +LBB26_25: + CMPL R14, $32 + JNE LBB26_83 ADDQ $-32, SI + ADDQ $32, AX CMPQ SI, $31 - JA LBB25_7 + JA LBB26_9 WORD $0xf8c5; BYTE $0x77 // vzeroupper - MOVQ R11, SI + ADDQ DI, AX + MOVQ AX, R14 CMPQ SI, $16 - JB LBB25_42 + JB LBB26_49 -LBB25_24: - LEAQ -16(SI), R14 - MOVQ R14, CX - ANDQ $-16, CX - LEAQ 16(CX)(R10*1), R11 - ANDL $15, R14 - QUAD $0xfffffda2056f7ac5 // vmovdqu $-606(%rip), %xmm8 /* LCPI25_7(%rip) */ - QUAD $0xfffffdaa0d6f7ac5 // vmovdqu $-598(%rip), %xmm9 /* LCPI25_8(%rip) */ - QUAD $0xfffffdb2156f7ac5 // vmovdqu $-590(%rip), %xmm10 /* LCPI25_9(%rip) */ - QUAD $0xfffffdba1d6f7ac5 // vmovdqu $-582(%rip), %xmm11 /* LCPI25_10(%rip) */ - QUAD $0xfffffdc2256ffac5 // vmovdqu $-574(%rip), %xmm4 /* LCPI25_11(%rip) */ - QUAD $0xfffffdca2d6ffac5 // vmovdqu $-566(%rip), %xmm5 /* LCPI25_12(%rip) */ - QUAD $0xfffffdd2356ffac5 // vmovdqu $-558(%rip), %xmm6 /* LCPI25_13(%rip) */ - MOVL $4294967295, R15 - -LBB25_25: - LONG $0x6f7ac1c4; BYTE $0x3a // vmovdqu (%r10), %xmm7 - LONG $0x6441c1c4; BYTE $0xc0 // vpcmpgtb %xmm8, %xmm7, %xmm0 - LONG $0xcf64b1c5 // vpcmpgtb %xmm7, 
%xmm9, %xmm1 - LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 - LONG $0xcf74a9c5 // vpcmpeqb %xmm7, %xmm10, %xmm1 - LONG $0xd774a1c5 // vpcmpeqb %xmm7, %xmm11, %xmm2 - LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 - LONG $0xd4ebc1c5 // vpor %xmm4, %xmm7, %xmm2 - LONG $0xd674e9c5 // vpcmpeqb %xmm6, %xmm2, %xmm2 - LONG $0xfd74c1c5 // vpcmpeqb %xmm5, %xmm7, %xmm7 - LONG $0xdfebe9c5 // vpor %xmm7, %xmm2, %xmm3 - LONG $0xc0ebf1c5 // vpor %xmm0, %xmm1, %xmm0 - LONG $0xc0ebe1c5 // vpor %xmm0, %xmm3, %xmm0 - LONG $0xd7d7f9c5 // vpmovmskb %xmm7, %edx - LONG $0xead779c5 // vpmovmskb %xmm2, %r13d - LONG $0xe1d779c5 // vpmovmskb %xmm1, %r12d - LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx - XORQ R15, CX - BSFQ CX, CX +LBB26_29: + MOVQ R14, R11 + SUBQ DI, R11 + XORL AX, AX + QUAD $0xfffffdce056f7ac5 // vmovdqu $-562(%rip), %xmm8 /* LCPI26_7(%rip) */ + QUAD $0xfffffdd60d6f7ac5 // vmovdqu $-554(%rip), %xmm9 /* LCPI26_8(%rip) */ + QUAD $0xfffffdde156f7ac5 // vmovdqu $-546(%rip), %xmm10 /* LCPI26_9(%rip) */ + QUAD $0xfffffde61d6f7ac5 // vmovdqu $-538(%rip), %xmm11 /* LCPI26_10(%rip) */ + QUAD $0xfffffdee256ffac5 // vmovdqu $-530(%rip), %xmm4 /* LCPI26_11(%rip) */ + QUAD $0xfffffdf62d6ffac5 // vmovdqu $-522(%rip), %xmm5 /* LCPI26_12(%rip) */ + QUAD $0xfffffdfe356ffac5 // vmovdqu $-514(%rip), %xmm6 /* LCPI26_13(%rip) */ + +LBB26_30: + LONG $0x6f7ac1c4; WORD $0x063c // vmovdqu (%r14,%rax), %xmm7 + LONG $0x6441c1c4; BYTE $0xc0 // vpcmpgtb %xmm8, %xmm7, %xmm0 + LONG $0xcf64b1c5 // vpcmpgtb %xmm7, %xmm9, %xmm1 + LONG $0xc1dbf9c5 // vpand %xmm1, %xmm0, %xmm0 + LONG $0xcf74a9c5 // vpcmpeqb %xmm7, %xmm10, %xmm1 + LONG $0xd774a1c5 // vpcmpeqb %xmm7, %xmm11, %xmm2 + LONG $0xc9ebe9c5 // vpor %xmm1, %xmm2, %xmm1 + LONG $0xd4dbc1c5 // vpand %xmm4, %xmm7, %xmm2 + LONG $0xd674e9c5 // vpcmpeqb %xmm6, %xmm2, %xmm2 + LONG $0xfd74c1c5 // vpcmpeqb %xmm5, %xmm7, %xmm7 + LONG $0xdfebe9c5 // vpor %xmm7, %xmm2, %xmm3 + LONG $0xc0ebf1c5 // vpor %xmm0, %xmm1, %xmm0 + LONG $0xc0ebe1c5 // vpor %xmm0, %xmm3, %xmm0 + LONG $0xd7d7f9c5 // vpmovmskb %xmm7, %edx + LONG $0xe2d779c5 // vpmovmskb %xmm2, %r12d + LONG $0xf9d779c5 // vpmovmskb %xmm1, %r15d + LONG $0xc8d7f9c5 // vpmovmskb %xmm0, %ecx + NOTL CX + BSFL CX, CX CMPL CX, $16 - JE LBB25_27 + JE LBB26_32 MOVL $-1, BX SHLL CX, BX NOTL BX ANDL BX, DX - ANDL BX, R13 - ANDL R12, BX - MOVL BX, R12 + ANDL BX, R12 + ANDL R15, BX + MOVL BX, R15 -LBB25_27: +LBB26_32: LEAL -1(DX), BX ANDL DX, BX - JNE LBB25_71 - LEAL -1(R13), BX - ANDL R13, BX - JNE LBB25_71 + JNE LBB26_33 LEAL -1(R12), BX ANDL R12, BX - JNE LBB25_71 + JNE LBB26_33 + LEAL -1(R15), BX + ANDL R15, BX + JNE LBB26_33 TESTL DX, DX - JE LBB25_33 - MOVQ R10, BX - SUBQ DI, BX + JE LBB26_40 BSFL DX, DX - ADDQ BX, DX - CMPQ R8, $-1 - JNE LBB25_72 - MOVQ DX, R8 - -LBB25_33: - TESTL R13, R13 - JE LBB25_36 - MOVQ R10, BX - SUBQ DI, BX - BSFL R13, DX - ADDQ BX, DX - CMPQ AX, $-1 - JNE LBB25_72 - MOVQ DX, AX + CMPQ R10, $-1 + JNE LBB26_84 + ADDQ R11, DX + ADDQ AX, DX + MOVQ DX, R10 -LBB25_36: +LBB26_40: TESTL R12, R12 - JE LBB25_39 - MOVQ R10, BX - SUBQ DI, BX + JE LBB26_43 BSFL R12, DX - ADDQ BX, DX + CMPQ R8, $-1 + JNE LBB26_84 + ADDQ R11, DX + ADDQ AX, DX + MOVQ DX, R8 + +LBB26_43: + TESTL R15, R15 + JE LBB26_46 + BSFL R15, DX CMPQ R9, $-1 - JNE LBB25_72 + JNE LBB26_84 + ADDQ R11, DX + ADDQ AX, DX MOVQ DX, R9 -LBB25_39: +LBB26_46: CMPL CX, $16 - JNE LBB25_55 - ADDQ $16, R10 + JNE LBB26_65 ADDQ $-16, SI + ADDQ $16, AX CMPQ SI, $15 - JA LBB25_25 - MOVQ R14, SI - MOVQ R11, R10 + JA LBB26_30 + ADDQ AX, R14 -LBB25_42: +LBB26_49: TESTQ SI, SI - JE 
LBB25_56 - LEAQ 0(R10)(SI*1), R11 - LONG $0x8f1d8d48; WORD $0x0001; BYTE $0x00 // leaq $399(%rip), %rbx /* LJTI25_0(%rip) */ - JMP LBB25_45 + JE LBB26_67 + LEAQ 0(R14)(SI*1), R11 + MOVQ R14, DX + SUBQ DI, DX + XORL AX, AX + LONG $0x813d8d4c; WORD $0x0001; BYTE $0x00 // leaq $385(%rip), %r15 /* LJTI26_0(%rip) */ + JMP LBB26_51 -LBB25_44: - MOVQ CX, R10 - DECQ SI - JE LBB25_75 - -LBB25_45: - MOVBLSX 0(R10), DX - ADDL $-43, DX - CMPL DX, $58 - JA LBB25_56 - LEAQ 1(R10), CX - MOVLQSX 0(BX)(DX*4), DX - ADDQ BX, DX - JMP DX +LBB26_53: + CMPL CX, $101 + JNE LBB26_66 + +LBB26_54: + CMPQ R8, $-1 + JNE LBB26_59 + LEAQ 0(DX)(AX*1), R8 -LBB25_47: - MOVQ CX, DX - SUBQ DI, DX +LBB26_63: + ADDQ $1, AX + CMPQ SI, AX + JE LBB26_64 + +LBB26_51: + MOVBLSX 0(R14)(AX*1), CX + LEAL -48(CX), BX + CMPL BX, $10 + JB LBB26_63 + LEAL -43(CX), BX + CMPL BX, $26 + JA LBB26_53 + MOVLQSX 0(R15)(BX*4), CX + ADDQ R15, CX + JMP CX + +LBB26_61: CMPQ R9, $-1 - JNE LBB25_79 - DECQ DX - MOVQ DX, R9 - JMP LBB25_44 - -LBB25_49: - MOVQ CX, DX - SUBQ DI, DX - CMPQ AX, $-1 - JNE LBB25_79 - DECQ DX - MOVQ DX, AX - JMP LBB25_44 + JNE LBB26_59 + LEAQ 0(DX)(AX*1), R9 + JMP LBB26_63 -LBB25_51: - MOVQ CX, DX - SUBQ DI, DX - CMPQ R8, $-1 - JNE LBB25_79 - DECQ DX - MOVQ DX, R8 - JMP LBB25_44 +LBB26_57: + CMPQ R10, $-1 + JNE LBB26_59 + LEAQ 0(DX)(AX*1), R10 + JMP LBB26_63 -LBB25_53: +LBB26_1: MOVQ $-1, AX - JMP LBB25_74 + JMP LBB26_81 -LBB25_54: - ADDQ CX, R14 +LBB26_83: + ADDQ AX, R14 + ADDQ DI, R14 WORD $0xf8c5; BYTE $0x77 // vzeroupper - MOVQ R14, R10 - MOVQ $-1, DX - TESTQ AX, AX - JNE LBB25_57 - JMP LBB25_73 + MOVQ $-1, AX + TESTQ R10, R10 + JNE LBB26_68 + JMP LBB26_81 -LBB25_55: - ADDQ CX, R10 +LBB26_65: + MOVL CX, CX + ADDQ CX, R14 -LBB25_56: - MOVQ $-1, DX - TESTQ AX, AX - JE LBB25_73 +LBB26_66: + ADDQ AX, R14 + +LBB26_67: + MOVQ $-1, AX + TESTQ R10, R10 + JE LBB26_81 -LBB25_57: +LBB26_68: TESTQ R9, R9 - JE LBB25_73 + JE LBB26_81 TESTQ R8, R8 - JE LBB25_73 - SUBQ DI, R10 - LEAQ -1(R10), CX - CMPQ AX, CX - JE LBB25_65 - CMPQ R8, CX - JE LBB25_65 - CMPQ R9, CX - JE LBB25_65 + JE LBB26_81 + SUBQ DI, R14 + LEAQ -1(R14), AX + CMPQ R10, AX + JE LBB26_73 + CMPQ R9, AX + JE LBB26_73 + CMPQ R8, AX + JE LBB26_73 TESTQ R9, R9 - JLE LBB25_66 - LEAQ -1(R9), CX - CMPQ AX, CX - JE LBB25_66 + JLE LBB26_77 + LEAQ -1(R9), AX + CMPQ R8, AX + JE LBB26_77 NOTQ R9 - MOVQ R9, DX MOVQ R9, AX - JMP LBB25_74 + JMP LBB26_81 -LBB25_65: - NEGQ R10 - MOVQ R10, DX - MOVQ R10, AX - JMP LBB25_74 +LBB26_73: + NEGQ R14 + MOVQ R14, AX + JMP LBB26_81 -LBB25_66: - MOVQ R8, CX - ORQ AX, CX - CMPQ R8, AX - JL LBB25_69 - TESTQ CX, CX - JS LBB25_69 +LBB26_77: + MOVQ R10, AX + ORQ R8, AX + SETPL AX + JS LBB26_80 + CMPQ R10, R8 + JL LBB26_80 + NOTQ R10 + MOVQ R10, AX + JMP LBB26_81 + +LBB26_80: + LEAQ -1(R8), CX + CMPQ R10, CX NOTQ R8 - MOVQ R8, DX + LONG $0xc6450f4d // cmovneq %r14, %r8 + TESTB AX, AX + LONG $0xc6440f4d // cmoveq %r14, %r8 MOVQ R8, AX - JMP LBB25_74 + JMP LBB26_81 -LBB25_69: - TESTQ CX, CX - LEAQ -1(AX), CX - NOTQ AX - LONG $0xc2480f49 // cmovsq %r10, %rax - CMPQ R8, CX - LONG $0xc2450f49 // cmovneq %r10, %rax - JMP LBB25_74 - -LBB25_70: - SUBQ DI, R14 - BSFL BX, DX - ADDQ R14, DX - JMP LBB25_72 +LBB26_64: + MOVQ R11, R14 + MOVQ $-1, AX + TESTQ R10, R10 + JNE LBB26_68 + JMP LBB26_81 -LBB25_71: - SUBQ DI, R10 - BSFL BX, DX - ADDQ R10, DX +LBB26_12: + BSFL CX, CX + JMP LBB26_13 -LBB25_72: - NOTQ DX +LBB26_82: + MOVL CX, CX -LBB25_73: - MOVQ DX, AX +LBB26_13: + NOTQ AX + SUBQ CX, AX + JMP LBB26_81 + +LBB26_33: + BSFL BX, CX + JMP LBB26_34 + +LBB26_59: + SUBQ 
R14, DI + JMP LBB26_60 + +LBB26_84: + MOVL DX, CX + +LBB26_34: + SUBQ R14, DI + SUBQ CX, DI + +LBB26_60: + NOTQ AX + ADDQ DI, AX -LBB25_74: +LBB26_81: BYTE $0x5b // popq %rbx WORD $0x5c41 // popq %r12 - WORD $0x5d41 // popq %r13 WORD $0x5e41 // popq %r14 WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp WORD $0xf8c5; BYTE $0x77 // vzeroupper RET -LBB25_75: - MOVQ R11, R10 - MOVQ $-1, DX - TESTQ AX, AX - JNE LBB25_57 - JMP LBB25_73 - -LBB25_79: - NEGQ DX - JMP LBB25_73 - -LBB25_76: - MOVQ $-1, R9 - MOVQ $-1, AX +LBB26_7: MOVQ $-1, R8 - MOVQ DI, R10 + MOVQ $-1, R10 + MOVQ DI, R14 CMPQ SI, $16 - JAE LBB25_24 - JMP LBB25_42 - -// .set L25_0_set_47, LBB25_47-LJTI25_0 -// .set L25_0_set_56, LBB25_56-LJTI25_0 -// .set L25_0_set_51, LBB25_51-LJTI25_0 -// .set L25_0_set_44, LBB25_44-LJTI25_0 -// .set L25_0_set_49, LBB25_49-LJTI25_0 -LJTI25_0: - LONG $0xfffffe98 // .long L25_0_set_47 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xfffffe98 // .long L25_0_set_47 - LONG $0xfffffec8 // .long L25_0_set_51 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xfffffe73 // .long L25_0_set_44 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xfffffeb0 // .long L25_0_set_49 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xffffff09 // .long L25_0_set_56 - LONG $0xfffffeb0 // .long L25_0_set_49 + JAE LBB26_29 + JMP LBB26_49 + +// .set L26_0_set_61, LBB26_61-LJTI26_0 +// .set L26_0_set_66, LBB26_66-LJTI26_0 +// .set L26_0_set_57, LBB26_57-LJTI26_0 +// .set L26_0_set_54, LBB26_54-LJTI26_0 +LJTI26_0: + LONG $0xfffffec3 // .long L26_0_set_61 + LONG $0xffffff0e // .long L26_0_set_66 
+ LONG $0xfffffec3 // .long L26_0_set_61 + LONG $0xfffffed3 // .long L26_0_set_57 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xffffff0e // .long L26_0_set_66 + LONG $0xfffffe8a // .long L26_0_set_54 _skip_positive: BYTE $0x55 // pushq %rbp @@ -7932,26 +7872,27 @@ _skip_positive: BYTE $0x53 // pushq %rbx MOVQ SI, R14 MOVQ 0(SI), BX - DECQ BX + ADDQ $-1, BX MOVQ 0(DI), AX ADDQ BX, AX MOVQ 8(DI), SI SUBQ BX, SI MOVQ AX, DI - LONG $0xfffa20e8; BYTE $0xff // callq _do_skip_number + LONG $0xfffae4e8; BYTE $0xff // callq _do_skip_number TESTQ AX, AX - JS LBB26_1 + JS LBB27_1 MOVQ 0(R14), CX - LEAQ -1(AX)(CX*1), CX - JMP LBB26_3 + ADDQ AX, CX + ADDQ $-1, CX + JMP LBB27_3 -LBB26_1: +LBB27_1: MOVQ 0(R14), CX SUBQ AX, CX ADDQ $-2, CX MOVQ $-2, BX -LBB26_3: +LBB27_3: MOVQ CX, 0(R14) MOVQ BX, AX BYTE $0x5b // popq %rbx @@ -7976,22 +7917,22 @@ _skip_number: SETEQ AX ADDQ AX, BX SUBQ AX, SI - JE LBB27_6 + JE LBB28_6 CMPQ R15, SI - JAE LBB27_3 + JAE LBB28_3 MOVB 0(BX), AX ADDB $-48, AX CMPB AX, $9 - JA LBB27_8 + JA LBB28_8 -LBB27_3: +LBB28_3: MOVQ BX, DI - LONG $0xfff9b2e8; BYTE $0xff // callq _do_skip_number + LONG $0xfffa74e8; BYTE $0xff // callq _do_skip_number TESTQ AX, AX - JS LBB27_7 + JS LBB28_7 ADDQ AX, BX -LBB27_5: +LBB28_5: SUBQ R12, BX MOVQ BX, 0(R14) MOVQ R15, AX @@ -8002,17 +7943,17 @@ LBB27_5: BYTE $0x5d // popq %rbp RET -LBB27_6: +LBB28_6: MOVQ $-1, R15 - JMP LBB27_5 + JMP LBB28_5 -LBB27_7: +LBB28_7: NOTQ AX ADDQ AX, BX -LBB27_8: +LBB28_8: MOVQ $-2, R15 - JMP LBB27_5 + JMP LBB28_5 _validate_one: BYTE $0x55 // pushq %rbp @@ -8029,84 +7970,106 @@ _validate_one: _find_non_ascii: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - MOVQ DI, CX CMPQ SI, $32 - JL LBB29_5 + JL LBB30_1 + XORL AX, AX + XORL DX, DX -LBB29_1: - LONG $0x016ffec5 // vmovdqu (%rcx), %ymm0 - LONG $0xc0d7fdc5 // vpmovmskb %ymm0, %eax - TESTL AX, AX - JNE LBB29_2 +LBB30_3: + LONG $0x046ffec5; BYTE $0x07 // vmovdqu (%rdi,%rax), %ymm0 + LONG $0xc8d7fdc5 // vpmovmskb %ymm0, %ecx + TESTL CX, CX + JNE LBB30_4 + ADDQ $32, AX + LEAQ 0(SI)(DX*1), CX + ADDQ $-32, CX + ADDQ $-32, DX ADDQ $32, CX - CMPQ SI, $63 - LEAQ -32(SI), SI - JG LBB29_1 + CMPQ CX, $63 + JG LBB30_3 + MOVQ DI, CX + SUBQ DX, CX + SUBQ AX, SI + WORD $0xf8c5; BYTE $0x77 // vzeroupper + CMPQ SI, $16 + JGE LBB30_13 -LBB29_5: +LBB30_8: + MOVQ SI, DX + JMP LBB30_9 + +LBB30_1: + MOVQ DI, CX WORD $0xf8c5; BYTE $0x77 // vzeroupper CMPQ SI, $16 - JL LBB29_6 + JL LBB30_8 -LBB29_11: +LBB30_13: + MOVQ DI, R8 + SUBQ CX, R8 + +LBB30_14: LONG $0x016ffac5 // vmovdqu (%rcx), %xmm0 LONG $0xc0d7f9c5 // vpmovmskb %xmm0, %eax - TESTW AX, AX - JNE LBB29_12 + TESTL AX, AX + JNE LBB30_15 + LEAQ -16(SI), DX ADDQ $16, CX + ADDQ $-16, R8 CMPQ SI, $31 - LEAQ 
-16(SI), SI - JG LBB29_11 + MOVQ DX, SI + JG LBB30_14 -LBB29_6: +LBB30_9: MOVQ $-1, AX - TESTQ SI, SI - JLE LBB29_14 - INCQ SI + TESTQ DX, DX + JLE LBB30_18 + ADDQ $1, DX + MOVQ CX, SI + SUBQ DI, SI -LBB29_8: +LBB30_11: CMPB 0(CX), $0 - JS LBB29_13 - INCQ CX - DECQ SI - CMPQ SI, $1 - JG LBB29_8 + JS LBB30_12 + ADDQ $1, CX + ADDQ $-1, DX + ADDQ $1, SI + CMPQ DX, $1 + JG LBB30_11 -LBB29_14: +LBB30_18: BYTE $0x5d // popq %rbp RET -LBB29_13: - SUBQ DI, CX - MOVQ CX, AX +LBB30_12: + MOVQ SI, AX BYTE $0x5d // popq %rbp RET -LBB29_2: - WORD $0x9848 // cltq - JMP LBB29_3 - -LBB29_12: - MOVWLZX AX, AX - -LBB29_3: - SUBQ DI, CX - BSFQ AX, AX - ADDQ CX, AX +LBB30_4: + BSFL CX, AX + SUBQ DX, AX BYTE $0x5d // popq %rbp WORD $0xf8c5; BYTE $0x77 // vzeroupper RET +LBB30_15: + BSFW AX, AX + MOVWLZX AX, AX + SUBQ R8, AX + BYTE $0x5d // popq %rbp + RET + _print_mantissa: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx - MOVLQSX DX, R14 + MOVL DX, R14 ADDQ SI, R14 MOVQ DI, AX SHRQ $32, AX - JE LBB30_2 + JE LBB31_2 MOVQ $-6067343680855748867, DX MOVQ DI, AX MULQ DX @@ -8138,7 +8101,7 @@ _print_mantissa: LONG $0x64fa6b41 // imull $100, %r10d, %edi SUBL DI, AX MOVWLZX AX, R11 - LONG $0x203d8d48; WORD $0x0059; BYTE $0x00 // leaq $22816(%rip), %rdi /* _Digits(%rip) */ + LONG $0x753d8d48; WORD $0x0059; BYTE $0x00 // leaq $22901(%rip), %rdi /* _Digits(%rip) */ MOVWLZX 0(DI)(R8*2), AX MOVW AX, -2(R14) MOVWLZX 0(DI)(R9*2), AX @@ -8150,13 +8113,13 @@ _print_mantissa: ADDQ $-8, R14 MOVQ DX, DI -LBB30_2: +LBB31_2: CMPL DI, $10000 - JB LBB30_3 + JB LBB31_3 MOVL $3518437209, R8 - LONG $0xd80d8d4c; WORD $0x0058; BYTE $0x00 // leaq $22744(%rip), %r9 /* _Digits(%rip) */ + LONG $0x2d0d8d4c; WORD $0x0059; BYTE $0x00 // leaq $22829(%rip), %r9 /* _Digits(%rip) */ -LBB30_5: +LBB31_5: MOVL DI, AX IMULQ R8, AX SHRQ $45, AX @@ -8173,11 +8136,11 @@ LBB30_5: ADDQ $-4, R14 CMPL DI, $99999999 MOVL AX, DI - JA LBB30_5 + JA LBB31_5 CMPL AX, $100 - JB LBB30_8 + JB LBB31_8 -LBB30_7: +LBB31_7: MOVWLZX AX, CX SHRL $2, CX LONG $0x147bc969; WORD $0x0000 // imull $5243, %ecx, %ecx @@ -8185,17 +8148,17 @@ LBB30_7: WORD $0xd16b; BYTE $0x64 // imull $100, %ecx, %edx SUBL DX, AX MOVWLZX AX, AX - LONG $0x71158d48; WORD $0x0058; BYTE $0x00 // leaq $22641(%rip), %rdx /* _Digits(%rip) */ + LONG $0xc6158d48; WORD $0x0058; BYTE $0x00 // leaq $22726(%rip), %rdx /* _Digits(%rip) */ MOVWLZX 0(DX)(AX*2), AX MOVW AX, -2(R14) ADDQ $-2, R14 MOVL CX, AX -LBB30_8: +LBB31_8: CMPL AX, $10 - JB LBB30_10 + JB LBB31_10 MOVL AX, AX - LONG $0x540d8d48; WORD $0x0058; BYTE $0x00 // leaq $22612(%rip), %rcx /* _Digits(%rip) */ + LONG $0xa90d8d48; WORD $0x0058; BYTE $0x00 // leaq $22697(%rip), %rcx /* _Digits(%rip) */ MOVWLZX 0(CX)(AX*2), AX MOVW AX, -2(R14) BYTE $0x5b // popq %rbx @@ -8203,13 +8166,13 @@ LBB30_8: BYTE $0x5d // popq %rbp RET -LBB30_3: +LBB31_3: MOVL DI, AX CMPL AX, $100 - JAE LBB30_7 - JMP LBB30_8 + JAE LBB31_7 + JMP LBB31_8 -LBB30_10: +LBB31_10: ADDB $48, AX MOVB AX, 0(SI) BYTE $0x5b // popq %rbx @@ -8220,292 +8183,314 @@ LBB30_10: _left_shift: BYTE $0x55 // pushq %rbp WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp + WORD $0x5741 // pushq %r15 WORD $0x5641 // pushq %r14 BYTE $0x53 // pushq %rbx MOVL SI, CX - IMUL3Q $104, CX, DX - LONG $0x78358d48; WORD $0x008a; BYTE $0x00 // leaq $35448(%rip), %rsi /* _LSHIFT_TAB(%rip) */ - MOVL 0(DX)(SI*1), R8 - MOVQ 0(DI), R10 + IMUL3Q $104, CX, R14 + LONG $0xcb158d48; WORD $0x008b; BYTE $0x00 // leaq $35787(%rip), %rdx /* _LSHIFT_TAB(%rip) */ + MOVL 
0(R14)(DX*1), R8 + MOVQ 0(DI), R11 MOVLQSX 16(DI), R9 - MOVB 4(DX)(SI*1), AX + MOVL R9, R10 TESTQ R9, R9 - JE LBB31_6 - LEAQ 5(DX)(SI*1), DX - XORL SI, SI + JE LBB32_1 + LEAQ 0(R14)(DX*1), SI + ADDQ $4, SI + XORL BX, BX -LBB31_3: +LBB32_3: + MOVBLZX 0(SI)(BX*1), AX TESTB AX, AX - JE LBB31_8 - CMPB 0(R10)(SI*1), AX - JNE LBB31_5 - MOVBLZX 0(DX)(SI*1), AX - INCQ SI - CMPQ R9, SI - JNE LBB31_3 - -LBB31_6: - TESTB AX, AX - JE LBB31_8 + JE LBB32_10 + CMPB 0(R11)(BX*1), AX + JNE LBB32_5 + ADDQ $1, BX + CMPQ R9, BX + JNE LBB32_3 + MOVL R9, SI + ADDQ R14, DX + CMPB 4(SI)(DX*1), $0 + JNE LBB32_9 + JMP LBB32_10 -LBB31_7: - DECL R8 +LBB32_1: + XORL SI, SI + ADDQ R14, DX + CMPB 4(SI)(DX*1), $0 + JE LBB32_10 -LBB31_8: - TESTL R9, R9 - JLE LBB31_23 - LEAL 0(R8)(R9*1), AX - MOVLQSX AX, R14 - DECQ R14 +LBB32_9: + ADDL $-1, R8 + +LBB32_10: + TESTL R10, R10 + JLE LBB32_25 + LEAL 0(R8)(R10*1), AX + MOVLQSX AX, R15 + ADDL $-1, R9 + ADDQ $-1, R15 XORL DX, DX - MOVQ $-3689348814741910323, R11 + MOVQ $-3689348814741910323, R14 -LBB31_10: - MOVBQSX -1(R10)(R9*1), SI +LBB32_12: + MOVL R9, AX + MOVBQSX 0(R11)(AX*1), SI ADDQ $-48, SI SHLQ CX, SI ADDQ DX, SI MOVQ SI, AX - MULQ R11 + MULQ R14 SHRQ $3, DX LEAQ 0(DX)(DX*1), AX LEAQ 0(AX)(AX*4), BX MOVQ SI, AX SUBQ BX, AX - CMPQ 8(DI), R14 - JBE LBB31_16 + CMPQ 8(DI), R15 + JBE LBB32_18 ADDB $48, AX - MOVB AX, 0(R10)(R14*1) - JMP LBB31_18 + MOVB AX, 0(R11)(R15*1) + JMP LBB32_20 -LBB31_16: +LBB32_18: TESTQ AX, AX - JE LBB31_18 + JE LBB32_20 MOVL $1, 28(DI) -LBB31_18: - CMPQ R9, $2 - JL LBB31_12 - DECQ R9 - MOVQ 0(DI), R10 - DECQ R14 - JMP LBB31_10 +LBB32_20: + CMPQ R10, $2 + JL LBB32_14 + ADDQ $-1, R10 + MOVQ 0(DI), R11 + ADDL $-1, R9 + ADDQ $-1, R15 + JMP LBB32_12 -LBB31_12: +LBB32_14: CMPQ SI, $10 - JAE LBB31_13 + JAE LBB32_15 -LBB31_23: +LBB32_25: MOVLQSX 16(DI), CX MOVLQSX R8, AX ADDQ CX, AX MOVL AX, 16(DI) MOVQ 8(DI), CX CMPQ CX, AX - JA LBB31_25 + JA LBB32_27 MOVL CX, 16(DI) MOVL CX, AX -LBB31_25: +LBB32_27: ADDL R8, 20(DI) TESTL AX, AX - JLE LBB31_29 + JLE LBB32_31 MOVQ 0(DI), CX - MOVL AX, AX + MOVL AX, DX + ADDQ $1, DX + ADDL $-1, AX -LBB31_27: - CMPB -1(CX)(AX*1), $48 - JNE LBB31_31 - MOVL AX, DX - DECQ AX - DECL DX - MOVL DX, 16(DI) - LEAQ 1(AX), DX +LBB32_29: + MOVL AX, SI + CMPB 0(CX)(SI*1), $48 + JNE LBB32_33 + MOVL AX, 16(DI) + ADDQ $-1, DX + ADDL $-1, AX CMPQ DX, $1 - JG LBB31_27 + JG LBB32_29 + JMP LBB32_32 -LBB31_29: - TESTL AX, AX - JE LBB31_30 +LBB32_31: + JNE LBB32_33 + +LBB32_32: + MOVL $0, 20(DI) -LBB31_31: +LBB32_33: BYTE $0x5b // popq %rbx WORD $0x5e41 // popq %r14 + WORD $0x5f41 // popq %r15 BYTE $0x5d // popq %rbp RET -LBB31_13: - MOVLQSX R14, SI - DECQ SI - JMP LBB31_14 +LBB32_15: + ADDL R8, R9 + MOVLQSX R9, SI + ADDQ $-1, SI + JMP LBB32_16 -LBB31_15: +LBB32_17: ADDB $48, AX MOVQ 0(DI), BX MOVB AX, 0(BX)(SI*1) -LBB31_22: - DECQ SI +LBB32_24: + ADDQ $-1, SI CMPQ CX, $9 - JBE LBB31_23 + JBE LBB32_25 -LBB31_14: +LBB32_16: MOVQ DX, CX MOVQ DX, AX - MULQ R11 + MULQ R14 SHRQ $3, DX LEAQ 0(DX)(DX*1), AX LEAQ 0(AX)(AX*4), BX MOVQ CX, AX SUBQ BX, AX CMPQ 8(DI), SI - JA LBB31_15 + JA LBB32_17 TESTQ AX, AX - JE LBB31_22 + JE LBB32_24 MOVL $1, 28(DI) - JMP LBB31_22 - -LBB31_30: - MOVL $0, 20(DI) - BYTE $0x5b // popq %rbx - WORD $0x5e41 // popq %r14 - BYTE $0x5d // popq %rbp - RET + JMP LBB32_24 -LBB31_5: - JL LBB31_7 - JMP LBB31_8 +LBB32_5: + JL LBB32_9 + JMP LBB32_10 _right_shift: - BYTE $0x55 // pushq %rbp - WORD $0x8948; BYTE $0xe5 // movq %rsp, %rbp - MOVL SI, CX - MOVLQSX 16(DI), R9 - XORL SI, SI - XORL AX, AX + BYTE $0x55 // pushq %rbp + WORD 
$0x8948; BYTE $0xe5 // movq %rsp, %rbp + BYTE $0x53 // pushq %rbx + MOVL SI, CX + MOVL 16(DI), R8 + XORL DX, DX + TESTL R8, R8 + MOVL $0, R11 + LONG $0xd84f0f45 // cmovgl %r8d, %r11d + XORL AX, AX -LBB32_1: - CMPQ SI, R9 - JGE LBB32_2 +LBB33_1: + CMPQ R11, DX + JE LBB33_2 LEAQ 0(AX)(AX*4), AX - MOVQ 0(DI), DX - MOVBQSX 0(DX)(SI*1), DX - LEAQ -48(DX)(AX*2), AX - INCQ SI - MOVQ AX, DX - SHRQ CX, DX - TESTQ DX, DX - JE LBB32_1 + MOVQ 0(DI), SI + MOVBQSX 0(SI)(DX*1), SI + LEAQ 0(SI)(AX*2), AX + ADDQ $-48, AX + ADDQ $1, DX + MOVQ AX, SI + SHRQ CX, SI + TESTQ SI, SI + JE LBB33_1 + MOVL DX, R11 -LBB32_6: +LBB33_7: MOVL 20(DI), DX - SUBL SI, DX - INCL DX - MOVQ $-1, R8 - SHLQ CX, R8 + SUBL R11, DX + ADDL $1, DX + MOVQ $-1, R9 + SHLQ CX, R9 MOVL DX, 20(DI) - NOTQ R8 + NOTQ R9 XORL R10, R10 - CMPL SI, R9 - JGE LBB32_9 - MOVLQSX SI, R9 + CMPL R11, R8 + JGE LBB33_10 + MOVLQSX R11, R8 MOVQ 0(DI), SI XORL R10, R10 -LBB32_8: +LBB33_9: MOVQ AX, DX SHRQ CX, DX - ANDQ R8, AX + ANDQ R9, AX ADDB $48, DX MOVB DX, 0(SI)(R10*1) - LEAQ 0(AX)(AX*4), AX MOVQ 0(DI), SI - LEAQ 0(SI)(R9*1), DX - MOVBQSX 0(R10)(DX*1), DX - LEAQ -48(DX)(AX*2), AX - MOVLQSX 16(DI), R11 - LEAQ 1(R9)(R10*1), DX - INCQ R10 - CMPQ DX, R11 - JL LBB32_8 - JMP LBB32_9 - -LBB32_11: + LEAQ 0(SI)(R8*1), DX + MOVBQSX 0(R10)(DX*1), R11 + LEAQ 1(R8)(R10*1), BX + ADDQ $1, R10 + LEAQ 0(AX)(AX*4), AX + LEAQ 0(R11)(AX*2), AX + ADDQ $-48, AX + MOVLQSX 16(DI), DX + CMPQ BX, DX + JL LBB33_9 + JMP LBB33_10 + +LBB33_12: ADDB $48, SI - MOVQ 0(DI), DX - MOVB SI, 0(DX)(R9*1) - INCL R9 - MOVL R9, R10 + MOVQ 0(DI), BX + MOVB SI, 0(BX)(DX*1) + ADDL $1, DX + MOVL DX, R10 -LBB32_14: +LBB33_15: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX -LBB32_9: +LBB33_10: TESTQ AX, AX - JE LBB32_15 + JE LBB33_16 MOVQ AX, SI SHRQ CX, SI - ANDQ R8, AX - MOVLQSX R10, R9 - CMPQ 8(DI), R9 - JA LBB32_11 + ANDQ R9, AX + MOVLQSX R10, DX + CMPQ 8(DI), DX + JA LBB33_12 TESTQ SI, SI - JE LBB32_14 + JE LBB33_15 MOVL $1, 28(DI) - JMP LBB32_14 + JMP LBB33_15 -LBB32_15: +LBB33_16: MOVL R10, 16(DI) TESTL R10, R10 - JLE LBB32_19 + JLE LBB33_20 MOVQ 0(DI), AX - MOVL R10, R10 - -LBB32_17: - CMPB -1(AX)(R10*1), $48 - JNE LBB32_21 - MOVL R10, CX - DECQ R10 - DECL CX - MOVL CX, 16(DI) - LEAQ 1(R10), CX + MOVL R10, CX + ADDQ $1, CX + ADDL $-1, R10 + +LBB33_18: + MOVL R10, DX + CMPB 0(AX)(DX*1), $48 + JNE LBB33_22 + MOVL R10, 16(DI) + ADDQ $-1, CX + ADDL $-1, R10 CMPQ CX, $1 - JG LBB32_17 - -LBB32_19: - TESTL R10, R10 - JE LBB32_20 - -LBB32_21: - BYTE $0x5d // popq %rbp - RET + JG LBB33_18 + JMP LBB33_21 -LBB32_2: +LBB33_2: TESTQ AX, AX - JE LBB32_22 + JE LBB33_23 MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX - JNE LBB32_6 + JNE LBB33_7 -LBB32_4: +LBB33_4: ADDQ AX, AX LEAQ 0(AX)(AX*4), AX - INCL SI + ADDL $1, R11 MOVQ AX, DX SHRQ CX, DX TESTQ DX, DX - JE LBB32_4 - JMP LBB32_6 + JE LBB33_4 + JMP LBB33_7 -LBB32_20: +LBB33_20: + JE LBB33_21 + +LBB33_22: + BYTE $0x5b // popq %rbx + BYTE $0x5d // popq %rbp + RET + +LBB33_21: MOVL $0, 20(DI) + BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET -LBB32_22: +LBB33_23: MOVL $0, 16(DI) + BYTE $0x5b // popq %rbx BYTE $0x5d // popq %rbp RET @@ -11870,6 +11855,25 @@ __DoubleQuoteTab: QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +__EscTab: + QUAD $0x0101010101010101; QUAD $0x0101010101010101 // .ascii 16, '\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01' + 
QUAD $0x0101010101010101; QUAD $0x0101010101010101 // .ascii 16, '\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01' + QUAD $0x0000000000010000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; LONG $0x00000000; BYTE $0x01 // .ascii 13, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .space 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + WORD $0x0000; BYTE $0x00 // .space 3, '\x00\x00\x00' + __UnquoteTab: QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' QUAD $0x0000000000000000; QUAD $0x0000000000000000 // .ascii 16, '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' @@ -12642,29 +12646,29 @@ _LSHIFT_TAB: LONG $0x00000000 // .asciz 4, '\x00\x00\x00\x00' _P10_TAB: - QUAD $0x3ff0000000000000 // .quad 4607182418800017408 - QUAD $0x4024000000000000 // .quad 4621819117588971520 - QUAD $0x4059000000000000 // .quad 4636737291354636288 - QUAD $0x408f400000000000 // .quad 4652007308841189376 - QUAD $0x40c3880000000000 // .quad 4666723172467343360 - QUAD $0x40f86a0000000000 // .quad 4681608360884174848 - QUAD $0x412e848000000000 // .quad 4696837146684686336 - QUAD $0x416312d000000000 // .quad 4711630319722168320 - QUAD $0x4197d78400000000 // .quad 4726483295884279808 - QUAD $0x41cdcd6500000000 // .quad 4741671816366391296 - QUAD $0x4202a05f20000000 // .quad 4756540486875873280 - QUAD $0x42374876e8000000 // .quad 4771362005757984768 - QUAD $0x426d1a94a2000000 // .quad 4786511204640096256 - QUAD $0x42a2309ce5400000 // .quad 4801453603149578240 - QUAD $0x42d6bcc41e900000 // .quad 4816244402031689728 - QUAD $0x430c6bf526340000 // .quad 4831355200913801216 - QUAD $0x4341c37937e08000 // .quad 4846369599423283200 - QUAD $0x4376345785d8a000 // .quad 4861130398305394688 - QUAD $0x43abc16d674ec800 // .quad 4876203697187506176 - QUAD $0x43e158e460913d00 // .quad 4891288408196988160 - 
QUAD $0x4415af1d78b58c40 // .quad 4906019910204099648 - QUAD $0x444b1ae4d6e2ef50 // .quad 4921056587992461136 - QUAD $0x4480f0cf064dd592 // .quad 4936209963552724370 + QUAD $0x3ff0000000000000 // .quad 0x3ff0000000000000 + QUAD $0x4024000000000000 // .quad 0x4024000000000000 + QUAD $0x4059000000000000 // .quad 0x4059000000000000 + QUAD $0x408f400000000000 // .quad 0x408f400000000000 + QUAD $0x40c3880000000000 // .quad 0x40c3880000000000 + QUAD $0x40f86a0000000000 // .quad 0x40f86a0000000000 + QUAD $0x412e848000000000 // .quad 0x412e848000000000 + QUAD $0x416312d000000000 // .quad 0x416312d000000000 + QUAD $0x4197d78400000000 // .quad 0x4197d78400000000 + QUAD $0x41cdcd6500000000 // .quad 0x41cdcd6500000000 + QUAD $0x4202a05f20000000 // .quad 0x4202a05f20000000 + QUAD $0x42374876e8000000 // .quad 0x42374876e8000000 + QUAD $0x426d1a94a2000000 // .quad 0x426d1a94a2000000 + QUAD $0x42a2309ce5400000 // .quad 0x42a2309ce5400000 + QUAD $0x42d6bcc41e900000 // .quad 0x42d6bcc41e900000 + QUAD $0x430c6bf526340000 // .quad 0x430c6bf526340000 + QUAD $0x4341c37937e08000 // .quad 0x4341c37937e08000 + QUAD $0x4376345785d8a000 // .quad 0x4376345785d8a000 + QUAD $0x43abc16d674ec800 // .quad 0x43abc16d674ec800 + QUAD $0x43e158e460913d00 // .quad 0x43e158e460913d00 + QUAD $0x4415af1d78b58c40 // .quad 0x4415af1d78b58c40 + QUAD $0x444b1ae4d6e2ef50 // .quad 0x444b1ae4d6e2ef50 + QUAD $0x4480f0cf064dd592 // .quad 0x4480f0cf064dd592 _first: QUAD $0xf0f0f0f0f0f0f0f0; QUAD $0xf0f0f0f0f0f0f0f0 // .ascii 16, '\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0' @@ -12701,14 +12705,14 @@ TEXT ·__f64toa(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -120(SP), R12 + LEAQ -136(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow _f64toa: MOVQ out+0(FP), DI MOVSD val+8(FP), X0 - CALL ·__native_entry__+903(SB) // _f64toa + CALL ·__native_entry__+814(SB) // _f64toa MOVQ AX, ret+16(FP) RET @@ -12730,7 +12734,7 @@ _html_escape: MOVQ nb+8(FP), SI MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX - CALL ·__native_entry__+9535(SB) // _html_escape + CALL ·__native_entry__+10717(SB) // _html_escape MOVQ AX, ret+32(FP) RET @@ -12750,7 +12754,7 @@ _entry: _i64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+3915(SB) // _i64toa + CALL ·__native_entry__+3449(SB) // _i64toa MOVQ AX, ret+16(FP) RET @@ -12771,7 +12775,7 @@ _lspace: MOVQ sp+0(FP), DI MOVQ nb+8(FP), SI MOVQ off+16(FP), DX - CALL ·__native_entry__+429(SB) // _lspace + CALL ·__native_entry__+379(SB) // _lspace MOVQ AX, ret+24(FP) RET @@ -12804,7 +12808,7 @@ TEXT ·__quote(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -64(SP), R12 + LEAQ -72(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12814,7 +12818,7 @@ _quote: MOVQ dp+16(FP), DX MOVQ dn+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+5328(SB) // _quote + CALL ·__native_entry__+4842(SB) // _quote MOVQ AX, ret+40(FP) RET @@ -12827,7 +12831,7 @@ TEXT ·__skip_array(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -136(SP), R12 + LEAQ -152(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12835,7 +12839,7 @@ _skip_array: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+21558(SB) // _skip_array + CALL ·__native_entry__+21271(SB) // _skip_array MOVQ AX, ret+24(FP) RET @@ -12848,14 +12852,14 @@ TEXT ·__skip_number(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -96(SP), R12 + LEAQ -88(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow _skip_number: MOVQ s+0(FP), DI MOVQ p+8(FP), SI - CALL ·__native_entry__+25206(SB) // _skip_number + 
CALL ·__native_entry__+24742(SB) // _skip_number MOVQ AX, ret+16(FP) RET @@ -12868,7 +12872,7 @@ TEXT ·__skip_object(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -136(SP), R12 + LEAQ -152(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12876,7 +12880,7 @@ _skip_object: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+21595(SB) // _skip_object + CALL ·__native_entry__+21308(SB) // _skip_object MOVQ AX, ret+24(FP) RET @@ -12889,7 +12893,7 @@ TEXT ·__skip_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -136(SP), R12 + LEAQ -152(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12897,7 +12901,7 @@ _skip_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+18458(SB) // _skip_one + CALL ·__native_entry__+19499(SB) // _skip_one MOVQ AX, ret+24(FP) RET @@ -12917,7 +12921,7 @@ _entry: _u64toa: MOVQ out+0(FP), DI MOVQ val+8(FP), SI - CALL ·__native_entry__+4008(SB) // _u64toa + CALL ·__native_entry__+3544(SB) // _u64toa MOVQ AX, ret+16(FP) RET @@ -12940,7 +12944,7 @@ _unquote: MOVQ dp+16(FP), DX MOVQ ep+24(FP), CX MOVQ flags+32(FP), R8 - CALL ·__native_entry__+7080(SB) // _unquote + CALL ·__native_entry__+7467(SB) // _unquote MOVQ AX, ret+40(FP) RET @@ -12953,7 +12957,7 @@ TEXT ·__validate_one(SB), NOSPLIT | NOFRAME, $0 - 32 _entry: MOVQ (TLS), R14 - LEAQ -136(SP), R12 + LEAQ -152(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12961,7 +12965,7 @@ _validate_one: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ m+16(FP), DX - CALL ·__native_entry__+25323(SB) // _validate_one + CALL ·__native_entry__+24859(SB) // _validate_one MOVQ AX, ret+24(FP) RET @@ -12974,7 +12978,7 @@ TEXT ·__value(SB), NOSPLIT | NOFRAME, $0 - 48 _entry: MOVQ (TLS), R14 - LEAQ -408(SP), R12 + LEAQ -416(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -12984,7 +12988,7 @@ _value: MOVQ p+16(FP), DX MOVQ v+24(FP), CX MOVQ allow_control+32(FP), R8 - CALL ·__native_entry__+13781(SB) // _value + CALL ·__native_entry__+14548(SB) // _value MOVQ AX, ret+40(FP) RET @@ -13005,7 +13009,7 @@ _vnumber: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+16616(SB), AX // _vnumber + LEAQ ·__native_entry__+17612(SB), AX // _vnumber JMP AX _stack_grow: @@ -13025,7 +13029,7 @@ _vsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+17930(SB), AX // _vsigned + LEAQ ·__native_entry__+18917(SB), AX // _vsigned JMP AX _stack_grow: @@ -13045,7 +13049,7 @@ _vstring: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+15739(SB), AX // _vstring + LEAQ ·__native_entry__+16708(SB), AX // _vstring JMP AX _stack_grow: @@ -13057,7 +13061,7 @@ TEXT ·__vunsigned(SB), NOSPLIT | NOFRAME, $0 - 24 _entry: MOVQ (TLS), R14 - LEAQ -8(SP), R12 + LEAQ -24(SP), R12 CMPQ R12, 16(R14) JBE _stack_grow @@ -13065,7 +13069,7 @@ _vunsigned: MOVQ s+0(FP), DI MOVQ p+8(FP), SI MOVQ v+16(FP), DX - LEAQ ·__native_entry__+18189(SB), AX // _vunsigned + LEAQ ·__native_entry__+19197(SB), AX // _vunsigned JMP AX _stack_grow: diff --git a/internal/native/avx2/native_subr_amd64.go b/internal/native/avx2/native_subr_amd64.go index 527f583d4..f03aea917 100644 --- a/internal/native/avx2/native_subr_amd64.go +++ b/internal/native/avx2/native_subr_amd64.go @@ -9,45 +9,45 @@ package avx2 func __native_entry__() uintptr var ( - _subr__f64toa = __native_entry__() + 903 - _subr__html_escape = __native_entry__() + 9535 - _subr__i64toa = __native_entry__() + 3915 - _subr__lspace = __native_entry__() + 429 + _subr__f64toa = __native_entry__() + 814 
+    _subr__html_escape  = __native_entry__() + 10717
+    _subr__i64toa       = __native_entry__() + 3449
+    _subr__lspace       = __native_entry__() + 379
     _subr__lzero        = __native_entry__() + 13
-    _subr__quote        = __native_entry__() + 5328
-    _subr__skip_array   = __native_entry__() + 21558
-    _subr__skip_number  = __native_entry__() + 25206
-    _subr__skip_object  = __native_entry__() + 21595
-    _subr__skip_one     = __native_entry__() + 18458
-    _subr__u64toa       = __native_entry__() + 4008
-    _subr__unquote      = __native_entry__() + 7080
-    _subr__validate_one = __native_entry__() + 25323
-    _subr__value        = __native_entry__() + 13781
-    _subr__vnumber      = __native_entry__() + 16616
-    _subr__vsigned      = __native_entry__() + 17930
-    _subr__vstring      = __native_entry__() + 15739
-    _subr__vunsigned    = __native_entry__() + 18189
+    _subr__quote        = __native_entry__() + 4842
+    _subr__skip_array   = __native_entry__() + 21271
+    _subr__skip_number  = __native_entry__() + 24742
+    _subr__skip_object  = __native_entry__() + 21308
+    _subr__skip_one     = __native_entry__() + 19499
+    _subr__u64toa       = __native_entry__() + 3544
+    _subr__unquote      = __native_entry__() + 7467
+    _subr__validate_one = __native_entry__() + 24859
+    _subr__value        = __native_entry__() + 14548
+    _subr__vnumber      = __native_entry__() + 17612
+    _subr__vsigned      = __native_entry__() + 18917
+    _subr__vstring      = __native_entry__() + 16708
+    _subr__vunsigned    = __native_entry__() + 19197
 )
 
 const (
-    _stack__f64toa       = 120
+    _stack__f64toa       = 136
     _stack__html_escape  = 72
     _stack__i64toa       = 24
     _stack__lspace       = 8
     _stack__lzero        = 8
-    _stack__quote        = 64
-    _stack__skip_array   = 136
-    _stack__skip_number  = 96
-    _stack__skip_object  = 136
-    _stack__skip_one     = 136
+    _stack__quote        = 72
+    _stack__skip_array   = 152
+    _stack__skip_number  = 88
+    _stack__skip_object  = 152
+    _stack__skip_one     = 152
     _stack__u64toa       = 8
     _stack__unquote      = 72
-    _stack__validate_one = 136
-    _stack__value        = 408
+    _stack__validate_one = 152
+    _stack__value        = 416
     _stack__vnumber      = 312
     _stack__vsigned      = 16
     _stack__vstring      = 112
-    _stack__vunsigned    = 8
+    _stack__vunsigned    = 24
 )
 
 var (
diff --git a/native/parsing.c b/native/parsing.c
index 26821541f..ce47c0728 100644
--- a/native/parsing.c
+++ b/native/parsing.c
@@ -18,10 +18,10 @@
 #include
 
 /** String Quoting **/
-
+#define MAX_ESCAPED_BYTES 8
 typedef struct {
     const long n;
-    const char s[8];
+    const char s[MAX_ESCAPED_BYTES];
 } quoted_t;
 
 static const quoted_t _SingleQuoteTab[256] = {
@@ -250,6 +250,154 @@ static inline ssize_t memcchr_quote(const char *sp, ssize_t nb, char *dp, ssize_
     }
 }
 
+static const bool _EscTab[256] = {
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x00-0x0F: control characters
+    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x10-0x1F: control characters
+    //        '"' (0x22)
+    0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x20-0x2F
+    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x30-0x3F
+    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x40-0x4F
+    //                                  '\\' (0x5C)
+    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, // 0x50-0x5F
+    // 0x60-0xFF are all zeroes
+};
+
+/* build a 4-bit mask: bit i is set iff sp[i] needs escaping */
+static inline uint8_t escape_mask4(const char *sp) {
+    return _EscTab[*(uint8_t *)(sp)]
+         | (_EscTab[*(uint8_t *)(sp + 1)] << 1)
+         | (_EscTab[*(uint8_t *)(sp + 2)] << 2)
+         | (_EscTab[*(uint8_t *)(sp + 3)] << 3);
+}
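+/* Worked example: for the four input bytes {'a', '"', 'b', '\\'}, _EscTab
+ * marks only the quote and the backslash, so escape_mask4() returns 0b1010,
+ * and __builtin_ctz(0b1010) == 1 is the offset of the first byte that needs
+ * escaping. This is what lets the unrolled copy below locate escapes with a
+ * table load and a ctz instead of a branch per byte. */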
+
+static inline ssize_t memcchr_quote_unsafe(const char *sp, ssize_t nb, char *dp, const quoted_t * tab) {
+    uint32_t mm;
+    const char * ds = dp;
+    size_t cn = 0;
+
+simd_copy:
+
+    if (nb < 16) goto scalar_copy;
+
+#if USE_AVX2
+    /* 32-byte loop, full store */
+    while (nb >= 32) {
+        __m256i vv = _mm256_loadu_si256 ((const void *)sp);
+        __m256i rv = _mm256_find_quote (vv);
+        _mm256_storeu_si256 ((void *)dp, vv);
+
+        /* check for matches */
+        if ((mm = _mm256_movemask_epi8(rv)) != 0) {
+            cn = __builtin_ctz(mm);
+            sp += cn;
+            nb -= cn;
+            dp += cn;
+            goto escape;
+        }
+
+        /* move to next block */
+        sp += 32;
+        dp += 32;
+        nb -= 32;
+    }
+
+    /* clear upper half to avoid AVX-SSE transition penalty */
+    _mm256_zeroupper();
+#endif
+
+    /* 16-byte loop, full store */
+    while (nb >= 16) {
+        __m128i vv = _mm_loadu_si128 ((const void *)sp);
+        __m128i rv = _mm_find_quote (vv);
+        _mm_storeu_si128 ((void *)dp, vv);
+
+        /* check for matches */
+        if ((mm = _mm_movemask_epi8(rv)) != 0) {
+            cn = __builtin_ctz(mm);
+            sp += cn;
+            nb -= cn;
+            dp += cn;
+            goto escape;
+        }
+
+        /* move to next block */
+        sp += 16;
+        dp += 16;
+        nb -= 16;
+    }
+
+    /* handle the remaining bytes with scalar code; the straightforward
+     * byte-at-a-time tail would be:
+     *
+     *     while (nb > 0) {
+     *         if (_EscTab[*(uint8_t *)sp]) {
+     *             goto escape;
+     *         } else {
+     *             nb--;
+     *             *dp++ = *sp++;
+     *         }
+     *     }
+     *
+     * it is unrolled below into 8-byte and 4-byte steps. */
+
+scalar_copy:
+    if (nb >= 8) {
+        uint8_t mask1 = escape_mask4(sp);
+        *(uint64_t *)dp = *(const uint64_t *)sp;
+        if (unlikely(mask1)) {
+            cn = __builtin_ctz(mask1);
+            sp += cn;
+            nb -= cn;
+            dp += cn;
+            goto escape;
+        }
+        uint8_t mask2 = escape_mask4(sp + 4);
+        if (unlikely(mask2)) {
+            cn = __builtin_ctz(mask2);
+            sp += cn + 4;
+            nb -= cn + 4;
+            dp += cn + 4;
+            goto escape;
+        }
+        dp += 8, sp += 8, nb -= 8;
+    }
+
+    if (nb >= 4) {
+        uint8_t mask2 = escape_mask4(sp);
+        *(uint32_t *)dp = *(const uint32_t *)sp;
+        if (unlikely(mask2)) {
+            cn = __builtin_ctz(mask2);
+            sp += cn;
+            nb -= cn;
+            dp += cn;
+            goto escape;
+        }
+        dp += 4, sp += 4, nb -= 4;
+    }
+
+    while (nb > 0) {
+        if (unlikely(_EscTab[*(uint8_t *)(sp)])) goto escape;
+        *dp++ = *sp++, nb--;
+    }
+    /* all quoting done */
+    return dp - ds;
+
+escape:
+    /* get the escape entry, handle consecutive escaped characters */
+    do {
+        uint8_t ch = *(uint8_t *)sp;
+        int nc = tab[ch].n;
+        /* copy the quoted value.
+         * Note: dp always has at least 8 bytes (MAX_ESCAPED_BYTES) of room
+         * here, so we do not need memcpy_p8(dp, tab[ch].s, nc). */
+        *(uint64_t *)dp = *(const uint64_t *)tab[ch].s;
+        sp++;
+        nb--;
+        dp += nc;
+        if (nb <= 0) break;
+        /* copy and find escape chars */
+        if (_EscTab[*(uint8_t *)(sp)] == 0) {
+            goto simd_copy;
+        }
+    } while (true);
+    return dp - ds;
+}
+
 ssize_t quote(const char *sp, ssize_t nb, char *dp, ssize_t *dn, uint64_t flags) {
     ssize_t nd = *dn;
     const char * ds = dp;
@@ -263,6 +411,11 @@ ssize_t quote(const char *sp, ssize_t nb, char *dp, ssize_t *dn, uint64_t flags)
         tab = _DoubleQuoteTab;
     }
 
+    /* when the output buffer is large enough for the worst case,
+     * use the bound-check-free copy */
+    if (*dn >= nb * MAX_ESCAPED_BYTES) {
+        *dn = memcchr_quote_unsafe(sp, nb, dp, tab);
+        return nb;
+    }
+
     /* find the special characters, copy on the fly */
     while (nb != 0) {
         int nc;
@@ -827,4 +980,6 @@ ssize_t html_escape(const char *sp, ssize_t nb, char *dp, ssize_t *dn) {
     /* all done */
     *dn = dp - ds;
     return sp - ss;
-}
\ No newline at end of file
+}
+
+#undef MAX_ESCAPED_BYTES
\ No newline at end of file
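
The fast path above only fires when the caller guarantees worst-case room in the destination: a source byte expands to at most six output bytes ("\u00XX"), padded to MAX_ESCAPED_BYTES (8) so the unconditional 8-byte store per escape stays in bounds. Below is a minimal caller sketch under that sizing rule; the buffer math is the point, the quote() prototype is the one from native/parsing.c above, and the input string is made up for illustration.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <sys/types.h>

/* prototype from native/parsing.c above */
ssize_t quote(const char *sp, ssize_t nb, char *dp, ssize_t *dn, uint64_t flags);

int main(void) {
    const char *src = "say \"hi\"\n";             /* hypothetical input */
    ssize_t nb = (ssize_t)strlen(src);

    /* worst case: 8 output bytes (MAX_ESCAPED_BYTES) per input byte;
     * this is exactly the condition that selects memcchr_quote_unsafe */
    ssize_t dn = nb * 8;
    char *dp = malloc((size_t)dn);
    if (dp == NULL) return 1;

    ssize_t ret = quote(src, nb, dp, &dn, 0);      /* ret == nb: all consumed */
    printf("wrote %zd bytes: %.*s\n", dn, (int)dn, dp);

    free(dp);
    return 0;
}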