Submission #1046465

Submission time: 2024-08-06T15:20:39Z
Handle:          ymm
Problem:         Distributing Candies (IOI21_candies)
Language:        C++17
Result:          Compilation error
Execution time:  0 ms
Memory:          0 KB
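
The submitted source below is GCC-generated x86-64 AVX2 assembly embedded in a raw-string asm() block. Judging from the mangled symbols and assert strings it carries (void up<start_neg>(int, int, std::vector<int>) and a variadic uprange<start_neg, ...>, both operating on global arrays a and b), the helper functions appear to perform clamped range updates on the candy counts. Below is a hypothetical C++17 sketch of that behaviour, inferred from the assembly rather than taken from the author's source; the bound N is a placeholder and the plain scalar loops stand in for the unrolled AVX2 code.

#include <algorithm>
#include <vector>

constexpr int N = 200'000; // placeholder bound; the real arrays a and b live elsewhere in the submission
int a[N];                  // current candies per box
int b[N];                  // box capacities

// uprange<true>:  a[i] = max(a[i] - v, 0)    for i in [l, r)
// uprange<false>: a[i] = min(a[i] + v, b[i]) for i in [l, r)
// (the original is variadic, instantiated with a single int; one int is kept here)
template <bool start_neg>
void uprange(int l, int r, int v)
{
    for (int i = l; i < r; ++i)
        a[i] = start_neg ? std::max(a[i] - v, 0) : std::min(a[i] + v, b[i]);
}

// up<start_neg>: apply the values in vals to every a[i], i in [l, r), alternating between
// the two clamped updates above; with start_neg = true the first value is subtracted.
// The assembly only handles vals.size() <= 6 and asserts otherwise.
template <bool start_neg>
void up(int l, int r, std::vector<int> vals)
{
    for (int i = l; i < r; ++i) {
        bool neg = start_neg;
        for (int v : vals) {
            a[i] = neg ? std::max(a[i] - v, 0) : std::min(a[i] + v, b[i]);
            neg = !neg;
        }
    }
}

The raw submission follows.
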
asm(R"asm(
.file	"candies.cpp"
.text
#APP
.globl _ZSt21ios_base_library_initv
#NO_APP
.section	.text._Z7uprangeILb1EJiEEviiDpT0_,"axG",@progbits,_Z7uprangeILb1EJiEEviiDpT0_,comdat
.p2align 4
.weak	_Z7uprangeILb1EJiEEviiDpT0_
.type	_Z7uprangeILb1EJiEEviiDpT0_,@function
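# uprange<true, int>(int l, int r, int v), demangled from the symbol below:
# for i in [l, r), a[i] = max(a[i] - v, 0). Vectorized 8 ints per ymm register
# (vpsubd + vpmaxsd against zero), with a 4-wide and scalar tail for the remainder.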
_Z7uprangeILb1EJiEEviiDpT0_:
.LFB11148:
.cfi_startproc
endbr64
vmovd	%edx,%xmm1
movslq	%esi,%r10
movslq	%edi,%rdx
vmovdqa	%xmm1,%xmm3
cmpq	%r10,%rdx
jge	.L53
movq	%r10,%rsi
movq	%rdx,%r11
subq	%rdx,%rsi
leaq	-1(%rsi),%rax
cmpq	$6,%rax
jbe	.L8
movq	%rsi,%r8
leaq	a(%rip),%r9
vpxor	%xmm0,%xmm0,%xmm0
xorl	%eax,%eax
shrq	$3,%r8
leaq	(%r9,%rdx,4),%rcx
vpbroadcastd	%xmm1,%ymm2
salq	$2,%r8
leaq	-4(%r8),%rdi
shrq	$2,%rdi
addq	$1,%rdi
andl	$7,%edi
je	.L4
cmpq	$1,%rdi
je	.L35
cmpq	$2,%rdi
je	.L36
cmpq	$3,%rdi
je	.L37
cmpq	$4,%rdi
je	.L38
cmpq	$5,%rdi
je	.L39
cmpq	$6,%rdi
jne	.L55
.L40:
vmovdqu	(%rcx,%rax,8),%ymm6
vpsubd	%ymm2,%ymm6,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm9
vmovdqu	%ymm9,(%rcx,%rax,8)
addq	$4,%rax
.L39:
vmovdqu	(%rcx,%rax,8),%ymm10
vpsubd	%ymm2,%ymm10,%ymm11
vpmaxsd	%ymm0,%ymm11,%ymm12
vmovdqu	%ymm12,(%rcx,%rax,8)
addq	$4,%rax
.L38:
vmovdqu	(%rcx,%rax,8),%ymm13
vpsubd	%ymm2,%ymm13,%ymm14
vpmaxsd	%ymm0,%ymm14,%ymm15
vmovdqu	%ymm15,(%rcx,%rax,8)
addq	$4,%rax
.L37:
vmovdqu	(%rcx,%rax,8),%ymm7
vpsubd	%ymm2,%ymm7,%ymm4
vpmaxsd	%ymm0,%ymm4,%ymm5
vmovdqu	%ymm5,(%rcx,%rax,8)
addq	$4,%rax
.L36:
vmovdqu	(%rcx,%rax,8),%ymm6
vpsubd	%ymm2,%ymm6,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm9
vmovdqu	%ymm9,(%rcx,%rax,8)
addq	$4,%rax
.L35:
vmovdqu	(%rcx,%rax,8),%ymm10
vpsubd	%ymm2,%ymm10,%ymm11
vpmaxsd	%ymm0,%ymm11,%ymm12
vmovdqu	%ymm12,(%rcx,%rax,8)
addq	$4,%rax
cmpq	%rax,%r8
je	.L51
.L4:
vmovdqu	(%rcx,%rax,8),%ymm13
leaq	4(%rax),%rdi
vpsubd	%ymm2,%ymm13,%ymm14
vpmaxsd	%ymm0,%ymm14,%ymm15
vmovdqu	%ymm15,(%rcx,%rax,8)
vmovdqu	(%rcx,%rdi,8),%ymm7
vpsubd	%ymm2,%ymm7,%ymm4
vpmaxsd	%ymm0,%ymm4,%ymm5
vmovdqu	%ymm5,(%rcx,%rdi,8)
leaq	8(%rax),%rdi
vmovdqu	(%rcx,%rdi,8),%ymm6
vpsubd	%ymm2,%ymm6,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm9
vmovdqu	%ymm9,(%rcx,%rdi,8)
leaq	12(%rax),%rdi
vmovdqu	(%rcx,%rdi,8),%ymm10
vpsubd	%ymm2,%ymm10,%ymm11
vpmaxsd	%ymm0,%ymm11,%ymm12
vmovdqu	%ymm12,(%rcx,%rdi,8)
leaq	16(%rax),%rdi
vmovdqu	(%rcx,%rdi,8),%ymm13
vpsubd	%ymm2,%ymm13,%ymm14
vpmaxsd	%ymm0,%ymm14,%ymm15
vmovdqu	%ymm15,(%rcx,%rdi,8)
leaq	20(%rax),%rdi
vmovdqu	(%rcx,%rdi,8),%ymm7
vpsubd	%ymm2,%ymm7,%ymm4
vpmaxsd	%ymm0,%ymm4,%ymm5
vmovdqu	%ymm5,(%rcx,%rdi,8)
leaq	24(%rax),%rdi
vmovdqu	(%rcx,%rdi,8),%ymm6
vpsubd	%ymm2,%ymm6,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm9
vmovdqu	%ymm9,(%rcx,%rdi,8)
leaq	28(%rax),%rdi
addq	$32,%rax
vmovdqu	(%rcx,%rdi,8),%ymm10
vpsubd	%ymm2,%ymm10,%ymm11
vpmaxsd	%ymm0,%ymm11,%ymm12
vmovdqu	%ymm12,(%rcx,%rdi,8)
cmpq	%rax,%r8
jne	.L4
.L51:
movq	%rsi,%r8
andq	$-8,%r8
addq	%r8,%rdx
testb	$7,%sil
je	.L56
vzeroupper
.L3:
subq	%r8,%rsi
leaq	-1(%rsi),%rcx
cmpq	$2,%rcx
jbe	.L6
addq	%r11,%r8
vpshufd	$0,%xmm3,%xmm3
vpxor	%xmm13,%xmm13,%xmm13
movq	%rsi,%rax
leaq	(%r9,%r8,4),%r11
andq	$-4,%rax
vmovdqu	(%r11),%xmm2
addq	%rax,%rdx
andl	$3,%esi
vpsubd	%xmm3,%xmm2,%xmm0
vpmaxsd	%xmm13,%xmm0,%xmm14
vmovdqu	%xmm14,(%r11)
je	.L53
.L6:
vmovd	(%r9,%rdx,4),%xmm15
vpxor	%xmm4,%xmm4,%xmm4
leaq	1(%rdx),%rsi
vpsubd	%xmm1,%xmm15,%xmm7
vpmaxsd	%xmm4,%xmm7,%xmm5
vmovd	%xmm5,(%r9,%rdx,4)
cmpq	%rsi,%r10
jle	.L53
vpinsrd	$0,(%r9,%rsi,4),%xmm4,%xmm6
addq	$2,%rdx
vpsubd	%xmm1,%xmm6,%xmm8
vpmaxsd	%xmm4,%xmm8,%xmm9
vmovd	%xmm9,(%r9,%rsi,4)
cmpq	%rdx,%r10
jle	.L53
vpinsrd	$0,(%r9,%rdx,4),%xmm4,%xmm10
vpsubd	%xmm1,%xmm10,%xmm11
vpmaxsd	%xmm4,%xmm11,%xmm1
vmovd	%xmm1,(%r9,%rdx,4)
.L53:
ret
.p2align 4,,10
.p2align 3
.L55:
vmovdqu	(%rcx),%ymm7
movl	$4,%eax
vpsubd	%ymm2,%ymm7,%ymm4
vpmaxsd	%ymm0,%ymm4,%ymm5
vmovdqu	%ymm5,(%rcx)
jmp	.L40
.p2align 4,,10
.p2align 3
.L56:
vzeroupper
ret
.L8:
xorl	%r8d,%r8d
leaq	a(%rip),%r9
jmp	.L3
.cfi_endproc
.LFE11148:
.size	_Z7uprangeILb1EJiEEviiDpT0_,.-_Z7uprangeILb1EJiEEviiDpT0_
.section	.text._Z7uprangeILb0EJiEEviiDpT0_,"axG",@progbits,_Z7uprangeILb0EJiEEviiDpT0_,comdat
.p2align 4
.weak	_Z7uprangeILb0EJiEEviiDpT0_
.type	_Z7uprangeILb0EJiEEviiDpT0_,@function
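# uprange<false, int>(int l, int r, int v): for i in [l, r), a[i] = min(a[i] + v, b[i])
# (vpaddd + vpminsd against b), vectorized the same way as uprange<true> above.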
_Z7uprangeILb0EJiEEviiDpT0_:
.LFB11149:
.cfi_startproc
endbr64
movslq	%edi,%rcx
movslq	%esi,%r10
cmpq	%r10,%rcx
jge	.L111
pushq	%rbp
.cfi_def_cfa_offset 16
.cfi_offset 6,-16
movq	%r10,%rdi
vmovd	%edx,%xmm1
subq	%rcx,%rdi
vmovdqa	%xmm1,%xmm0
leaq	-1(%rdi),%rax
movq	%rsp,%rbp
.cfi_def_cfa_register 6
pushq	%r12
pushq	%rbx
.cfi_offset 12,-24
.cfi_offset 3,-32
movq	%rcx,%rbx
cmpq	$6,%rax
jbe	.L64
movq	%rdi,%r12
leaq	b(%rip),%r11
xorl	%eax,%eax
shrq	$3,%r12
leaq	0(,%rcx,4),%rdx
vpbroadcastd	%xmm1,%ymm2
salq	$2,%r12
leaq	a(%rip),%r9
leaq	(%r11,%rdx),%r8
leaq	-4(%r12),%rsi
addq	%r9,%rdx
shrq	$2,%rsi
addq	$1,%rsi
andl	$7,%esi
je	.L60
cmpq	$1,%rsi
je	.L91
cmpq	$2,%rsi
je	.L92
cmpq	$3,%rsi
je	.L93
cmpq	$4,%rsi
je	.L94
cmpq	$5,%rsi
je	.L95
cmpq	$6,%rsi
jne	.L114
.L96:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm5
vpminsd	(%r8,%rax,8),%ymm5,%ymm6
vmovdqu	%ymm6,(%rdx,%rax,8)
addq	$4,%rax
.L95:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm7
vpminsd	(%r8,%rax,8),%ymm7,%ymm8
vmovdqu	%ymm8,(%rdx,%rax,8)
addq	$4,%rax
.L94:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm9
vpminsd	(%r8,%rax,8),%ymm9,%ymm10
vmovdqu	%ymm10,(%rdx,%rax,8)
addq	$4,%rax
.L93:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm11
vpminsd	(%r8,%rax,8),%ymm11,%ymm12
vmovdqu	%ymm12,(%rdx,%rax,8)
addq	$4,%rax
.L92:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm13
vpminsd	(%r8,%rax,8),%ymm13,%ymm14
vmovdqu	%ymm14,(%rdx,%rax,8)
addq	$4,%rax
.L91:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm15
vpminsd	(%r8,%rax,8),%ymm15,%ymm3
vmovdqu	%ymm3,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%rax,%r12
je	.L107
.L60:
vpaddd	(%rdx,%rax,8),%ymm2,%ymm4
vpminsd	(%r8,%rax,8),%ymm4,%ymm5
vpaddd	32(%rdx,%rax,8),%ymm2,%ymm6
vpaddd	64(%rdx,%rax,8),%ymm2,%ymm8
vmovdqu	%ymm5,(%rdx,%rax,8)
vpminsd	32(%r8,%rax,8),%ymm6,%ymm7
vpaddd	96(%rdx,%rax,8),%ymm2,%ymm10
vpaddd	128(%rdx,%rax,8),%ymm2,%ymm12
vpaddd	160(%rdx,%rax,8),%ymm2,%ymm14
vpaddd	192(%rdx,%rax,8),%ymm2,%ymm3
vmovdqu	%ymm7,32(%rdx,%rax,8)
vpminsd	64(%r8,%rax,8),%ymm8,%ymm9
vpaddd	224(%rdx,%rax,8),%ymm2,%ymm5
vmovdqu	%ymm9,64(%rdx,%rax,8)
vpminsd	96(%r8,%rax,8),%ymm10,%ymm11
vmovdqu	%ymm11,96(%rdx,%rax,8)
vpminsd	128(%r8,%rax,8),%ymm12,%ymm13
vmovdqu	%ymm13,128(%rdx,%rax,8)
vpminsd	160(%r8,%rax,8),%ymm14,%ymm15
vmovdqu	%ymm15,160(%rdx,%rax,8)
vpminsd	192(%r8,%rax,8),%ymm3,%ymm4
vmovdqu	%ymm4,192(%rdx,%rax,8)
vpminsd	224(%r8,%rax,8),%ymm5,%ymm6
vmovdqu	%ymm6,224(%rdx,%rax,8)
addq	$32,%rax
cmpq	%rax,%r12
jne	.L60
.L107:
movq	%rdi,%r8
andq	$-8,%r8
addq	%r8,%rcx
testb	$7,%dil
je	.L115
vzeroupper
.L59:
subq	%r8,%rdi
leaq	-1(%rdi),%rdx
cmpq	$2,%rdx
jbe	.L62
addq	%rbx,%r8
movq	%rdi,%r12
vpshufd	$0,%xmm0,%xmm0
leaq	(%r9,%r8,4),%rbx
andq	$-4,%r12
vpaddd	(%rbx),%xmm0,%xmm2
vpminsd	(%r11,%r8,4),%xmm2,%xmm7
addq	%r12,%rcx
andl	$3,%edi
vmovdqu	%xmm7,(%rbx)
je	.L109
.L62:
vmovd	(%r9,%rcx,4),%xmm8
vmovd	(%r11,%rcx,4),%xmm10
leaq	1(%rcx),%rdi
vpaddd	%xmm8,%xmm1,%xmm9
vpminsd	%xmm10,%xmm9,%xmm11
vmovd	%xmm11,(%r9,%rcx,4)
cmpq	%rdi,%r10
jle	.L109
vmovd	(%r9,%rdi,4),%xmm12
vmovd	(%r11,%rdi,4),%xmm14
addq	$2,%rcx
vpaddd	%xmm12,%xmm1,%xmm13
vpminsd	%xmm14,%xmm13,%xmm15
vmovd	%xmm15,(%r9,%rdi,4)
cmpq	%rcx,%r10
jle	.L109
vmovd	(%r9,%rcx,4),%xmm3
vpaddd	%xmm3,%xmm1,%xmm4
vmovd	(%r11,%rcx,4),%xmm1
vpminsd	%xmm1,%xmm4,%xmm5
vmovd	%xmm5,(%r9,%rcx,4)
.L109:
popq	%rbx
popq	%r12
popq	%rbp
.cfi_remember_state
.cfi_def_cfa 7,8
ret
.p2align 4,,10
.p2align 3
.L114:
.cfi_restore_state
vpaddd	(%rdx),%ymm2,%ymm3
vpminsd	(%r8),%ymm3,%ymm4
movl	$4,%eax
vmovdqu	%ymm4,(%rdx)
jmp	.L96
.p2align 4,,10
.p2align 3
.L111:
.cfi_def_cfa 7,8
.cfi_restore 3
.cfi_restore 6
.cfi_restore 12
ret
.p2align 4,,10
.p2align 3
.L115:
.cfi_def_cfa 6,16
.cfi_offset 3,-32
.cfi_offset 6,-16
.cfi_offset 12,-24
vzeroupper
popq	%rbx
popq	%r12
popq	%rbp
.cfi_remember_state
.cfi_def_cfa 7,8
ret
.L64:
.cfi_restore_state
xorl	%r8d,%r8d
leaq	b(%rip),%r11
leaq	a(%rip),%r9
jmp	.L59
.cfi_endproc
.LFE11149:
.size	_Z7uprangeILb0EJiEEviiDpT0_,.-_Z7uprangeILb0EJiEEviiDpT0_
.section	.rodata._Z2upILb1EEviiSt6vectorIiSaIiEE.str1.8,"aMS",@progbits,1
.align 8
.LC0:
.string	"void up(int,int,std::vector<int>) [with bool start_neg = true]"
.section	.rodata._Z2upILb1EEviiSt6vectorIiSaIiEE.str1.1,"aMS",@progbits,1
.LC1:
.string	"candies.cpp"
.LC2:
.string	"false"
.section	.text._Z2upILb1EEviiSt6vectorIiSaIiEE,"axG",@progbits,_Z2upILb1EEviiSt6vectorIiSaIiEE,comdat
.p2align 4
.weak	_Z2upILb1EEviiSt6vectorIiSaIiEE
.type	_Z2upILb1EEviiSt6vectorIiSaIiEE,@function
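# up<true>(int l, int r, std::vector<int> vals) -- the start_neg = true instantiation named in .LC0.
# Dispatches on vals.size() (0..6) through the jump table at .L119; for each i in [l, r) it applies
# the values alternately: a[i] = max(a[i] - vals[0], 0), a[i] = min(a[i] + vals[1], b[i]), and so on.
# Larger sizes branch to the __assert_fail path at .L117.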
_Z2upILb1EEviiSt6vectorIiSaIiEE:
.LFB11140:
.cfi_startproc
endbr64
pushq	%rbp
.cfi_def_cfa_offset 16
.cfi_offset 6,-16
movq	%rsp,%rbp
.cfi_def_cfa_register 6
pushq	%r12
pushq	%rbx
andq	$-32,%rsp
subq	$32,%rsp
.cfi_offset 12,-24
.cfi_offset 3,-32
movq	(%rdx),%rax
movq	8(%rdx),%rdx
subq	%rax,%rdx
movq	%rdx,%r8
sarq	$2,%r8
cmpq	$27,%rdx
ja	.L117
movslq	%esi,%rcx
leaq	.L119(%rip),%rsi
movslq	(%rsi,%r8,4),%rbx
addq	%rsi,%rbx
notrack jmp	*%rbx
.section	.rodata._Z2upILb1EEviiSt6vectorIiSaIiEE,"aG",@progbits,_Z2upILb1EEviiSt6vectorIiSaIiEE,comdat
.align 4
.align 4
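# Jump table indexed by vals.size(); entry k applies k alternating updates (entry 0 is a plain return).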
.L119:
.long	.L366-.L119
.long	.L124-.L119
.long	.L123-.L119
.long	.L122-.L119
.long	.L121-.L119
.long	.L120-.L119
.long	.L118-.L119
.section	.text._Z2upILb1EEviiSt6vectorIiSaIiEE,"axG",@progbits,_Z2upILb1EEviiSt6vectorIiSaIiEE,comdat
.p2align 4,,10
.p2align 3
.L372:
vzeroupper
.L366:
leaq	-16(%rbp),%rsp
popq	%rbx
popq	%r12
popq	%rbp
.cfi_remember_state
.cfi_def_cfa 7,8
ret
.p2align 4,,10
.p2align 3
.L120:
.cfi_restore_state
vmovd	16(%rax),%xmm11
vmovd	12(%rax),%xmm12
vmovd	(%rax),%xmm15
movslq	%edi,%rdi
vmovd	8(%rax),%xmm13
vmovd	4(%rax),%xmm14
vmovd	%xmm11,28(%rsp)
vmovdqa	%xmm12,%xmm2
vmovdqa	%xmm13,%xmm3
vmovdqa	%xmm14,%xmm4
vmovdqa	%xmm15,%xmm5
cmpq	%rcx,%rdi
jge	.L366
movq	%rcx,%rsi
movq	%rdi,%r11
subq	%rdi,%rsi
leaq	-1(%rsi),%r9
cmpq	$6,%r9
jbe	.L168
movq	%rsi,%r12
vpbroadcastd	%xmm15,%ymm10
vpbroadcastd	%xmm14,%ymm9
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm13,%ymm8
vpxor	%xmm0,%xmm0,%xmm0
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	b(%rip),%r10
leaq	-4(%r12),%r8
leaq	(%r10,%rdx),%rbx
vpbroadcastd	%xmm12,%ymm7
shrq	$2,%r8
leaq	a(%rip),%r9
vpbroadcastd	%xmm11,%ymm6
addq	$1,%r8
addq	%r9,%rdx
andl	$3,%r8d
je	.L153
cmpq	$1,%r8
je	.L294
cmpq	$2,%r8
jne	.L373
.L295:
vmovdqu	(%rdx,%rax,8),%ymm1
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%rax,8)
addq	$4,%rax
.L294:
vmovdqu	(%rdx,%rax,8),%ymm1
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%r12,%rax
je	.L347
.L153:
vmovdqu	(%rdx,%rax,8),%ymm1
leaq	4(%rax),%r8
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%rax,8)
vmovdqu	(%rdx,%r8,8),%ymm1
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx,%r8,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx,%r8,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%r8,8)
leaq	8(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm1
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx,%r8,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx,%r8,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%r8,8)
leaq	12(%rax),%r8
addq	$16,%rax
vmovdqu	(%rdx,%r8,8),%ymm1
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx,%r8,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx,%r8,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%r8,8)
cmpq	%r12,%rax
jne	.L153
.L347:
movq	%rsi,%rbx
andq	$-8,%rbx
addq	%rbx,%rdi
testb	$7,%sil
je	.L372
vzeroupper
.L152:
subq	%rbx,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L156
addq	%r11,%rbx
vpshufd	$0,%xmm5,%xmm5
vpxor	%xmm7,%xmm7,%xmm7
movq	%rsi,%r12
leaq	(%r9,%rbx,4),%r11
vmovdqu	(%r10,%rbx,4),%xmm10
vpshufd	$0,%xmm4,%xmm4
vpshufd	$0,%xmm3,%xmm3
vmovdqu	(%r11),%xmm9
vpshufd	$0,%xmm2,%xmm2
andq	$-4,%r12
addq	%r12,%rdi
andl	$3,%esi
vpsubd	%xmm5,%xmm9,%xmm8
vpmaxsd	%xmm7,%xmm8,%xmm6
vpaddd	%xmm4,%xmm6,%xmm0
vmovd	28(%rsp),%xmm6
vpminsd	%xmm10,%xmm0,%xmm1
vpsubd	%xmm3,%xmm1,%xmm5
vpshufd	$0,%xmm6,%xmm4
vpmaxsd	%xmm7,%xmm5,%xmm9
vpaddd	%xmm2,%xmm9,%xmm8
vpminsd	%xmm10,%xmm8,%xmm10
vpsubd	%xmm4,%xmm10,%xmm0
vpmaxsd	%xmm7,%xmm0,%xmm7
vmovdqu	%xmm7,(%r11)
je	.L366
.L156:
vmovd	(%r9,%rdi,4),%xmm3
vpxor	%xmm9,%xmm9,%xmm9
vmovd	(%r10,%rdi,4),%xmm1
vpsubd	%xmm15,%xmm3,%xmm5
leaq	1(%rdi),%rsi
vpmaxsd	%xmm9,%xmm5,%xmm2
vpaddd	%xmm14,%xmm2,%xmm8
vpminsd	%xmm1,%xmm8,%xmm10
vpsubd	%xmm13,%xmm10,%xmm6
vpmaxsd	%xmm9,%xmm6,%xmm4
vpaddd	%xmm12,%xmm4,%xmm0
vpminsd	%xmm1,%xmm0,%xmm7
vpsubd	%xmm11,%xmm7,%xmm3
vpmaxsd	%xmm9,%xmm3,%xmm5
vmovd	%xmm5,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L366
vpinsrd	$0,(%r9,%rsi,4),%xmm9,%xmm8
vpinsrd	$0,(%r10,%rsi,4),%xmm9,%xmm2
addq	$2,%rdi
vpsubd	%xmm15,%xmm8,%xmm10
vpmaxsd	%xmm9,%xmm10,%xmm6
vpaddd	%xmm14,%xmm6,%xmm4
vpminsd	%xmm2,%xmm4,%xmm0
vpsubd	%xmm13,%xmm0,%xmm7
vpmaxsd	%xmm9,%xmm7,%xmm3
vpaddd	%xmm12,%xmm3,%xmm1
vpminsd	%xmm2,%xmm1,%xmm5
vpsubd	%xmm11,%xmm5,%xmm2
vpmaxsd	%xmm9,%xmm2,%xmm10
vmovd	%xmm10,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L366
vpinsrd	$0,(%r9,%rdi,4),%xmm9,%xmm4
vpinsrd	$0,(%r10,%rdi,4),%xmm9,%xmm6
vpsubd	%xmm15,%xmm4,%xmm15
vpmaxsd	%xmm9,%xmm15,%xmm0
vpaddd	%xmm14,%xmm0,%xmm14
vpminsd	%xmm6,%xmm14,%xmm7
vpsubd	%xmm13,%xmm7,%xmm13
vpmaxsd	%xmm9,%xmm13,%xmm3
vpaddd	%xmm12,%xmm3,%xmm12
vpminsd	%xmm6,%xmm12,%xmm1
vpsubd	%xmm11,%xmm1,%xmm11
vpmaxsd	%xmm9,%xmm11,%xmm9
vmovd	%xmm9,(%r9,%rdi,4)
jmp	.L366
.p2align 4,,10
.p2align 3
.L118:
vmovd	20(%rax),%xmm9
vmovd	16(%rax),%xmm10
vmovd	12(%rax),%xmm11
movslq	%edi,%rdi
vmovd	8(%rax),%xmm12
vmovd	4(%rax),%xmm13
vmovd	(%rax),%xmm14
vmovd	%xmm10,28(%rsp)
vmovdqa	%xmm9,%xmm15
vmovd	%xmm11,24(%rsp)
vmovdqa	%xmm14,%xmm2
vmovd	%xmm12,20(%rsp)
vmovd	%xmm13,16(%rsp)
cmpq	%rcx,%rdi
jge	.L366
movq	%rcx,%rsi
movq	%rdi,%r12
subq	%rdi,%rsi
leaq	-1(%rsi),%r10
cmpq	$6,%r10
jbe	.L169
movq	%rsi,%r8
vpbroadcastd	%xmm14,%ymm8
vpxor	%xmm1,%xmm1,%xmm1
shrq	$3,%r8
leaq	b(%rip),%r11
leaq	0(,%rdi,4),%rax
addq	%rdi,%r8
leaq	(%r11,%rax),%rdx
vpbroadcastd	%xmm13,%ymm7
leaq	(%r11,%r8,4),%rbx
leaq	a(%rip),%r10
vpbroadcastd	%xmm12,%ymm6
movq	%rdx,%r9
movq	%rbx,%r8
vpbroadcastd	%xmm11,%ymm5
vpbroadcastd	%xmm10,%ymm4
addq	%r10,%rax
subq	%rdx,%r8
vpbroadcastd	%xmm9,%ymm3
subq	$4,%r8
shrq	$2,%r8
addq	$1,%r8
andl	$3,%r8d
je	.L354
cmpq	$1,%r8
je	.L296
cmpq	$2,%r8
jne	.L374
.L297:
vmovdqu	(%rax),%ymm0
addq	$32,%rdx
addq	$32,%rax
addq	$4,%r9
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vmovdqu	%ymm0,-32(%rax)
.L296:
vmovdqu	(%rax),%ymm0
addq	$4,%r9
addq	$32,%rdx
addq	$32,%rax
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vmovdqu	%ymm0,-32(%rax)
cmpq	%r9,%rbx
je	.L342
.L354:
vmovd	%xmm15,12(%rsp)
.L159:
vmovdqu	(%rax),%ymm0
vmovdqu	(%rdx),%ymm15
addq	$16,%r9
subq	$-128,%rdx
subq	$-128,%rax
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm15
vmovdqu	-96(%rax),%ymm0
vmovdqu	%ymm15,-128(%rax)
vmovdqu	-96(%rdx),%ymm15
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm15
vmovdqu	-64(%rax),%ymm0
vmovdqu	%ymm15,-96(%rax)
vmovdqu	-64(%rdx),%ymm15
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm15
vmovdqu	-32(%rax),%ymm0
vmovdqu	%ymm15,-64(%rax)
vmovdqu	-32(%rdx),%ymm15
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	%ymm15,%ymm0,%ymm15
vmovdqu	%ymm15,-32(%rax)
cmpq	%r9,%rbx
jne	.L159
vmovd	12(%rsp),%xmm15
.L342:
movq	%rsi,%rax
andq	$-8,%rax
addq	%rax,%rdi
testb	$7,%sil
je	.L372
vzeroupper
.L158:
subq	%rax,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L162
addq	%r12,%rax
vmovd	16(%rsp),%xmm3
vpxor	%xmm4,%xmm4,%xmm4
movq	%rsi,%rbx
leaq	(%r10,%rax,4),%r12
vpshufd	$0,%xmm2,%xmm2
vmovdqu	(%r11,%rax,4),%xmm8
vpshufd	$0,%xmm15,%xmm15
vmovdqu	(%r12),%xmm7
vpshufd	$0,%xmm3,%xmm1
andq	$-4,%rbx
vmovd	24(%rsp),%xmm3
addq	%rbx,%rdi
andl	$3,%esi
vpsubd	%xmm2,%xmm7,%xmm6
vmovd	20(%rsp),%xmm7
vpmaxsd	%xmm4,%xmm6,%xmm5
vpaddd	%xmm1,%xmm5,%xmm0
vpshufd	$0,%xmm7,%xmm6
vpminsd	%xmm8,%xmm0,%xmm2
vpshufd	$0,%xmm3,%xmm0
vpsubd	%xmm6,%xmm2,%xmm5
vmovd	28(%rsp),%xmm6
vpmaxsd	%xmm4,%xmm5,%xmm1
vpaddd	%xmm0,%xmm1,%xmm2
vpshufd	$0,%xmm6,%xmm5
vpminsd	%xmm8,%xmm2,%xmm7
vpsubd	%xmm5,%xmm7,%xmm1
vpmaxsd	%xmm4,%xmm1,%xmm4
vpaddd	%xmm15,%xmm4,%xmm3
vpminsd	%xmm8,%xmm3,%xmm8
vmovdqu	%xmm8,(%r12)
je	.L366
.L162:
vmovd	(%r10,%rdi,4),%xmm0
vpxor	%xmm2,%xmm2,%xmm2
vmovd	(%r11,%rdi,4),%xmm7
vpsubd	%xmm14,%xmm0,%xmm6
leaq	1(%rdi),%rsi
vpmaxsd	%xmm2,%xmm6,%xmm5
vpaddd	%xmm13,%xmm5,%xmm1
vpminsd	%xmm7,%xmm1,%xmm4
vpsubd	%xmm12,%xmm4,%xmm15
vpmaxsd	%xmm2,%xmm15,%xmm3
vpaddd	%xmm11,%xmm3,%xmm8
vpminsd	%xmm7,%xmm8,%xmm0
vpsubd	%xmm10,%xmm0,%xmm6
vpmaxsd	%xmm2,%xmm6,%xmm5
vpaddd	%xmm9,%xmm5,%xmm1
vpminsd	%xmm7,%xmm1,%xmm7
vmovd	%xmm7,(%r10,%rdi,4)
cmpq	%rsi,%rcx
jle	.L366
vpinsrd	$0,(%r10,%rsi,4),%xmm2,%xmm15
vpinsrd	$0,(%r11,%rsi,4),%xmm2,%xmm4
addq	$2,%rdi
vpsubd	%xmm14,%xmm15,%xmm3
vpmaxsd	%xmm2,%xmm3,%xmm8
vpaddd	%xmm13,%xmm8,%xmm0
vpminsd	%xmm4,%xmm0,%xmm6
vpsubd	%xmm12,%xmm6,%xmm5
vpmaxsd	%xmm2,%xmm5,%xmm1
vpaddd	%xmm11,%xmm1,%xmm7
vpminsd	%xmm4,%xmm7,%xmm15
vpsubd	%xmm10,%xmm15,%xmm3
vpmaxsd	%xmm2,%xmm3,%xmm8
vpaddd	%xmm9,%xmm8,%xmm0
vpminsd	%xmm4,%xmm0,%xmm4
vmovd	%xmm4,(%r10,%rsi,4)
cmpq	%rdi,%rcx
jle	.L366
vpinsrd	$0,(%r10,%rdi,4),%xmm2,%xmm5
vpinsrd	$0,(%r11,%rdi,4),%xmm2,%xmm6
vpsubd	%xmm14,%xmm5,%xmm14
vpmaxsd	%xmm2,%xmm14,%xmm1
vpaddd	%xmm13,%xmm1,%xmm13
vpminsd	%xmm6,%xmm13,%xmm7
vpsubd	%xmm12,%xmm7,%xmm12
vpmaxsd	%xmm2,%xmm12,%xmm15
vpaddd	%xmm11,%xmm15,%xmm11
vpminsd	%xmm6,%xmm11,%xmm3
vpsubd	%xmm10,%xmm3,%xmm10
vpmaxsd	%xmm2,%xmm10,%xmm2
vpaddd	%xmm9,%xmm2,%xmm9
vpminsd	%xmm6,%xmm9,%xmm8
vmovd	%xmm8,(%r10,%rdi,4)
jmp	.L366
.p2align 4,,10
.p2align 3
.L124:
vmovd	(%rax),%xmm13
movslq	%edi,%rdi
vmovdqa	%xmm13,%xmm9
cmpq	%rcx,%rdi
jge	.L366
movq	%rcx,%r11
movq	%rdi,%rbx
subq	%rdi,%r11
leaq	-1(%r11),%r10
cmpq	$6,%r10
jbe	.L164
movq	%r11,%r9
leaq	a(%rip),%r12
vpxor	%xmm0,%xmm0,%xmm0
xorl	%eax,%eax
shrq	$3,%r9
leaq	(%r12,%rdi,4),%rdx
vpbroadcastd	%xmm13,%ymm1
salq	$2,%r9
leaq	-4(%r9),%r8
shrq	$2,%r8
addq	$1,%r8
andl	$7,%r8d
je	.L128
cmpq	$1,%r8
je	.L278
cmpq	$2,%r8
je	.L279
cmpq	$3,%r8
je	.L280
cmpq	$4,%r8
je	.L281
cmpq	$5,%r8
je	.L282
cmpq	$6,%r8
je	.L283
vmovdqu	(%rdx),%ymm6
movl	$4,%eax
vpsubd	%ymm1,%ymm6,%ymm7
vpmaxsd	%ymm0,%ymm7,%ymm4
vmovdqu	%ymm4,(%rdx)
.L283:
vmovdqu	(%rdx,%rax,8),%ymm14
vpsubd	%ymm1,%ymm14,%ymm3
vpmaxsd	%ymm0,%ymm3,%ymm2
vmovdqu	%ymm2,(%rdx,%rax,8)
addq	$4,%rax
.L282:
vmovdqu	(%rdx,%rax,8),%ymm5
vpsubd	%ymm1,%ymm5,%ymm12
vpmaxsd	%ymm0,%ymm12,%ymm10
vmovdqu	%ymm10,(%rdx,%rax,8)
addq	$4,%rax
.L281:
vmovdqu	(%rdx,%rax,8),%ymm15
vpsubd	%ymm1,%ymm15,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm11
vmovdqu	%ymm11,(%rdx,%rax,8)
addq	$4,%rax
.L280:
vmovdqu	(%rdx,%rax,8),%ymm6
vpsubd	%ymm1,%ymm6,%ymm7
vpmaxsd	%ymm0,%ymm7,%ymm4
vmovdqu	%ymm4,(%rdx,%rax,8)
addq	$4,%rax
.L279:
vmovdqu	(%rdx,%rax,8),%ymm14
vpsubd	%ymm1,%ymm14,%ymm3
vpmaxsd	%ymm0,%ymm3,%ymm2
vmovdqu	%ymm2,(%rdx,%rax,8)
addq	$4,%rax
.L278:
vmovdqu	(%rdx,%rax,8),%ymm5
vpsubd	%ymm1,%ymm5,%ymm12
vpmaxsd	%ymm0,%ymm12,%ymm10
vmovdqu	%ymm10,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%rax,%r9
je	.L343
.L128:
vmovdqu	(%rdx,%rax,8),%ymm15
leaq	4(%rax),%rsi
leaq	8(%rax),%r10
leaq	12(%rax),%r8
vpsubd	%ymm1,%ymm15,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm11
vmovdqu	%ymm11,(%rdx,%rax,8)
vmovdqu	(%rdx,%rsi,8),%ymm6
vpsubd	%ymm1,%ymm6,%ymm7
vpmaxsd	%ymm0,%ymm7,%ymm4
vmovdqu	%ymm4,(%rdx,%rsi,8)
vmovdqu	(%rdx,%r10,8),%ymm14
leaq	16(%rax),%rsi
vpsubd	%ymm1,%ymm14,%ymm3
vpmaxsd	%ymm0,%ymm3,%ymm2
vmovdqu	%ymm2,(%rdx,%r10,8)
vmovdqu	(%rdx,%r8,8),%ymm5
leaq	20(%rax),%r10
vpsubd	%ymm1,%ymm5,%ymm12
vpmaxsd	%ymm0,%ymm12,%ymm10
vmovdqu	%ymm10,(%rdx,%r8,8)
vmovdqu	(%rdx,%rsi,8),%ymm15
leaq	24(%rax),%r8
vpsubd	%ymm1,%ymm15,%ymm8
vpmaxsd	%ymm0,%ymm8,%ymm11
vmovdqu	%ymm11,(%rdx,%rsi,8)
vmovdqu	(%rdx,%r10,8),%ymm6
leaq	28(%rax),%rsi
addq	$32,%rax
vpsubd	%ymm1,%ymm6,%ymm7
vpmaxsd	%ymm0,%ymm7,%ymm4
vmovdqu	%ymm4,(%rdx,%r10,8)
vmovdqu	(%rdx,%r8,8),%ymm14
vpsubd	%ymm1,%ymm14,%ymm3
vpmaxsd	%ymm0,%ymm3,%ymm2
vmovdqu	%ymm2,(%rdx,%r8,8)
vmovdqu	(%rdx,%rsi,8),%ymm5
vpsubd	%ymm1,%ymm5,%ymm12
vpmaxsd	%ymm0,%ymm12,%ymm10
vmovdqu	%ymm10,(%rdx,%rsi,8)
cmpq	%rax,%r9
jne	.L128
.L343:
movq	%r11,%r9
andq	$-8,%r9
addq	%r9,%rdi
testb	$7,%r11b
je	.L372
vzeroupper
.L127:
subq	%r9,%r11
leaq	-1(%r11),%rdx
cmpq	$2,%rdx
jbe	.L132
addq	%rbx,%r9
vpshufd	$0,%xmm9,%xmm9
vpxor	%xmm15,%xmm15,%xmm15
movq	%r11,%rax
leaq	(%r12,%r9,4),%rbx
andq	$-4,%rax
vmovdqu	(%rbx),%xmm1
addq	%rax,%rdi
andl	$3,%r11d
vpsubd	%xmm9,%xmm1,%xmm0
vpmaxsd	%xmm15,%xmm0,%xmm8
vmovdqu	%xmm8,(%rbx)
je	.L366
.L132:
vmovd	(%r12,%rdi,4),%xmm11
vpxor	%xmm7,%xmm7,%xmm7
leaq	1(%rdi),%r11
vpsubd	%xmm13,%xmm11,%xmm6
vpmaxsd	%xmm7,%xmm6,%xmm4
vmovd	%xmm4,(%r12,%rdi,4)
cmpq	%rcx,%r11
jge	.L366
vpinsrd	$0,(%r12,%r11,4),%xmm7,%xmm14
addq	$2,%rdi
vpsubd	%xmm13,%xmm14,%xmm3
vpmaxsd	%xmm7,%xmm3,%xmm2
vmovd	%xmm2,(%r12,%r11,4)
cmpq	%rdi,%rcx
jle	.L366
vpinsrd	$0,(%r12,%rdi,4),%xmm7,%xmm5
vpsubd	%xmm13,%xmm5,%xmm13
vpmaxsd	%xmm7,%xmm13,%xmm12
vmovd	%xmm12,(%r12,%rdi,4)
jmp	.L366
.p2align 4,,10
.p2align 3
.L122:
vmovd	8(%rax),%xmm2
vmovd	4(%rax),%xmm13
vmovd	(%rax),%xmm11
movslq	%edi,%rdi
vmovdqa	%xmm2,%xmm12
vmovdqa	%xmm13,%xmm6
vmovdqa	%xmm11,%xmm7
cmpq	%rcx,%rdi
jge	.L366
movq	%rcx,%rsi
movq	%rdi,%r11
subq	%rdi,%rsi
leaq	-1(%rsi),%r10
cmpq	$6,%r10
jbe	.L166
movq	%rsi,%r12
vpbroadcastd	%xmm11,%ymm5
vpbroadcastd	%xmm13,%ymm9
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm2,%ymm4
vpxor	%xmm8,%xmm8,%xmm8
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	b(%rip),%r10
leaq	-4(%r12),%r8
leaq	a(%rip),%r9
shrq	$2,%r8
leaq	(%r10,%rdx),%rbx
addq	%r9,%rdx
addq	$1,%r8
andl	$3,%r8d
je	.L141
cmpq	$1,%r8
je	.L290
cmpq	$2,%r8
jne	.L375
.L291:
vmovdqu	(%rdx,%rax,8),%ymm14
vpsubd	%ymm5,%ymm14,%ymm3
vpmaxsd	%ymm8,%ymm3,%ymm10
vpaddd	%ymm9,%ymm10,%ymm15
vpminsd	(%rbx,%rax,8),%ymm15,%ymm1
vpsubd	%ymm4,%ymm1,%ymm0
vpmaxsd	%ymm8,%ymm0,%ymm14
vmovdqu	%ymm14,(%rdx,%rax,8)
addq	$4,%rax
.L290:
vmovdqu	(%rdx,%rax,8),%ymm3
vpsubd	%ymm5,%ymm3,%ymm10
vpmaxsd	%ymm8,%ymm10,%ymm15
vpaddd	%ymm9,%ymm15,%ymm1
vpminsd	(%rbx,%rax,8),%ymm1,%ymm0
vpsubd	%ymm4,%ymm0,%ymm14
vpmaxsd	%ymm8,%ymm14,%ymm3
vmovdqu	%ymm3,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%r12,%rax
je	.L345
.L141:
vmovdqu	(%rdx,%rax,8),%ymm10
leaq	4(%rax),%r8
vpsubd	%ymm5,%ymm10,%ymm15
vpmaxsd	%ymm8,%ymm15,%ymm1
vpaddd	%ymm9,%ymm1,%ymm0
vpminsd	(%rbx,%rax,8),%ymm0,%ymm14
vpsubd	%ymm4,%ymm14,%ymm3
vpmaxsd	%ymm8,%ymm3,%ymm10
vmovdqu	%ymm10,(%rdx,%rax,8)
vmovdqu	(%rdx,%r8,8),%ymm15
vpsubd	%ymm5,%ymm15,%ymm1
vpmaxsd	%ymm8,%ymm1,%ymm0
vpaddd	%ymm9,%ymm0,%ymm14
vpminsd	(%rbx,%r8,8),%ymm14,%ymm3
vpsubd	%ymm4,%ymm3,%ymm10
vpmaxsd	%ymm8,%ymm10,%ymm15
vmovdqu	%ymm15,(%rdx,%r8,8)
leaq	8(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm1
vpsubd	%ymm5,%ymm1,%ymm0
vpmaxsd	%ymm8,%ymm0,%ymm14
vpaddd	%ymm9,%ymm14,%ymm3
vpminsd	(%rbx,%r8,8),%ymm3,%ymm10
vpsubd	%ymm4,%ymm10,%ymm15
vpmaxsd	%ymm8,%ymm15,%ymm1
vmovdqu	%ymm1,(%rdx,%r8,8)
leaq	12(%rax),%r8
addq	$16,%rax
vmovdqu	(%rdx,%r8,8),%ymm0
vpsubd	%ymm5,%ymm0,%ymm14
vpmaxsd	%ymm8,%ymm14,%ymm3
vpaddd	%ymm9,%ymm3,%ymm10
vpminsd	(%rbx,%r8,8),%ymm10,%ymm15
vpsubd	%ymm4,%ymm15,%ymm1
vpmaxsd	%ymm8,%ymm1,%ymm0
vmovdqu	%ymm0,(%rdx,%r8,8)
cmpq	%r12,%rax
jne	.L141
.L345:
movq	%rsi,%rbx
andq	$-8,%rbx
addq	%rbx,%rdi
testb	$7,%sil
je	.L372
vzeroupper
.L140:
subq	%rbx,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L144
addq	%r11,%rbx
vpshufd	$0,%xmm7,%xmm7
vpxor	%xmm4,%xmm4,%xmm4
movq	%rsi,%r12
leaq	(%r9,%rbx,4),%r11
vpshufd	$0,%xmm6,%xmm6
vpshufd	$0,%xmm12,%xmm12
andq	$-4,%r12
vmovdqu	(%r11),%xmm5
addq	%r12,%rdi
andl	$3,%esi
vpsubd	%xmm7,%xmm5,%xmm9
vpmaxsd	%xmm4,%xmm9,%xmm8
vpaddd	%xmm6,%xmm8,%xmm14
vpminsd	(%r10,%rbx,4),%xmm14,%xmm3
vpsubd	%xmm12,%xmm3,%xmm10
vpmaxsd	%xmm4,%xmm10,%xmm15
vmovdqu	%xmm15,(%r11)
je	.L366
.L144:
vmovd	(%r9,%rdi,4),%xmm1
vpxor	%xmm7,%xmm7,%xmm7
vpinsrd	$0,(%r10,%rdi,4),%xmm7,%xmm4
vpsubd	%xmm11,%xmm1,%xmm0
vpxor	%xmm14,%xmm14,%xmm14
leaq	1(%rdi),%rsi
vpmaxsd	%xmm7,%xmm0,%xmm5
vpaddd	%xmm13,%xmm5,%xmm9
vpminsd	%xmm4,%xmm9,%xmm8
vpsubd	%xmm2,%xmm8,%xmm6
vpmaxsd	%xmm14,%xmm6,%xmm3
vmovd	%xmm3,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L366
vpinsrd	$0,(%r9,%rsi,4),%xmm14,%xmm12
vpinsrd	$0,(%r10,%rsi,4),%xmm14,%xmm1
vpxor	%xmm9,%xmm9,%xmm9
addq	$2,%rdi
vpsubd	%xmm11,%xmm12,%xmm10
vpmaxsd	%xmm14,%xmm10,%xmm15
vpaddd	%xmm13,%xmm15,%xmm0
vpminsd	%xmm1,%xmm0,%xmm7
vpsubd	%xmm2,%xmm7,%xmm5
vpmaxsd	%xmm9,%xmm5,%xmm4
vmovd	%xmm4,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L366
vpinsrd	$0,(%r9,%rdi,4),%xmm9,%xmm8
vpinsrd	$0,(%r10,%rdi,4),%xmm9,%xmm14
vpxor	%xmm12,%xmm12,%xmm12
vpsubd	%xmm11,%xmm8,%xmm11
vpmaxsd	%xmm9,%xmm11,%xmm6
vpaddd	%xmm13,%xmm6,%xmm13
vpminsd	%xmm14,%xmm13,%xmm3
vpsubd	%xmm2,%xmm3,%xmm2
vpmaxsd	%xmm12,%xmm2,%xmm10
vmovd	%xmm10,(%r9,%rdi,4)
jmp	.L366
.p2align 4,,10
.p2align 3
.L121:
vmovd	12(%rax),%xmm15
vmovd	8(%rax),%xmm14
vmovd	(%rax),%xmm5
movslq	%edi,%rdi
vmovd	4(%rax),%xmm4
vmovdqa	%xmm15,%xmm6
vmovdqa	%xmm14,%xmm7
vmovdqa	%xmm4,%xmm8
vmovdqa	%xmm5,%xmm3
cmpq	%rcx,%rdi
jge	.L366
movq	%rcx,%rsi
movq	%rdi,%r11
subq	%rdi,%rsi
leaq	-1(%rsi),%r10
cmpq	$6,%r10
jbe	.L167
movq	%rsi,%r12
vpbroadcastd	%xmm5,%ymm13
vpbroadcastd	%xmm4,%ymm12
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm14,%ymm11
vpxor	%xmm1,%xmm1,%xmm1
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	b(%rip),%r10
leaq	-4(%r12),%r8
leaq	(%r10,%rdx),%rbx
vpbroadcastd	%xmm15,%ymm10
shrq	$2,%r8
leaq	a(%rip),%r9
addq	$1,%r8
addq	%r9,%rdx
andl	$3,%r8d
je	.L147
cmpq	$1,%r8
je	.L292
cmpq	$2,%r8
jne	.L376
.L293:
vmovdqu	(%rdx,%rax,8),%ymm0
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx,%rax,8),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx,%rax,8),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx,%rax,8)
addq	$4,%rax
.L292:
vmovdqu	(%rdx,%rax,8),%ymm0
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx,%rax,8),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx,%rax,8),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%rax,%r12
je	.L346
.L147:
vmovdqu	(%rdx,%rax,8),%ymm0
leaq	4(%rax),%r8
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx,%rax,8),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx,%rax,8),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx,%rax,8)
vmovdqu	(%rdx,%r8,8),%ymm0
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx,%r8,8),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx,%r8,8),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx,%r8,8)
leaq	8(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm0
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx,%r8,8),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx,%r8,8),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx,%r8,8)
leaq	12(%rax),%r8
addq	$16,%rax
vmovdqu	(%rdx,%r8,8),%ymm0
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx,%r8,8),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx,%r8,8),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx,%r8,8)
cmpq	%rax,%r12
jne	.L147
.L346:
movq	%rsi,%rbx
andq	$-8,%rbx
addq	%rbx,%rdi
testb	$7,%sil
je	.L372
vzeroupper
.L146:
subq	%rbx,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L150
addq	%r11,%rbx
vpshufd	$0,%xmm3,%xmm3
vpxor	%xmm10,%xmm10,%xmm10
movq	%rsi,%r12
leaq	(%r9,%rbx,4),%r11
vmovdqu	(%r10,%rbx,4),%xmm13
vpshufd	$0,%xmm8,%xmm8
vpshufd	$0,%xmm7,%xmm7
vmovdqu	(%r11),%xmm12
vpshufd	$0,%xmm6,%xmm6
andq	$-4,%r12
addq	%r12,%rdi
andl	$3,%esi
vpsubd	%xmm3,%xmm12,%xmm11
vpmaxsd	%xmm10,%xmm11,%xmm1
vpaddd	%xmm8,%xmm1,%xmm0
vpminsd	%xmm13,%xmm0,%xmm2
vpsubd	%xmm7,%xmm2,%xmm9
vpmaxsd	%xmm10,%xmm9,%xmm3
vpaddd	%xmm6,%xmm3,%xmm12
vpminsd	%xmm13,%xmm12,%xmm13
vmovdqu	%xmm13,(%r11)
je	.L366
.L150:
vmovd	(%r9,%rdi,4),%xmm10
vpxor	%xmm8,%xmm8,%xmm8
vmovd	(%r10,%rdi,4),%xmm11
vpsubd	%xmm5,%xmm10,%xmm1
leaq	1(%rdi),%rsi
vpmaxsd	%xmm8,%xmm1,%xmm0
vpaddd	%xmm4,%xmm0,%xmm2
vpminsd	%xmm11,%xmm2,%xmm7
vpsubd	%xmm14,%xmm7,%xmm9
vpmaxsd	%xmm8,%xmm9,%xmm3
vpaddd	%xmm15,%xmm3,%xmm6
vpminsd	%xmm11,%xmm6,%xmm12
vmovd	%xmm12,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L366
vpinsrd	$0,(%r9,%rsi,4),%xmm8,%xmm11
vpinsrd	$0,(%r10,%rsi,4),%xmm8,%xmm13
addq	$2,%rdi
vpsubd	%xmm5,%xmm11,%xmm10
vpmaxsd	%xmm8,%xmm10,%xmm1
vpaddd	%xmm4,%xmm1,%xmm0
vpminsd	%xmm13,%xmm0,%xmm2
vpsubd	%xmm14,%xmm2,%xmm7
vpmaxsd	%xmm8,%xmm7,%xmm9
vpaddd	%xmm15,%xmm9,%xmm3
vpminsd	%xmm13,%xmm3,%xmm6
vmovd	%xmm6,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L366
vpinsrd	$0,(%r9,%rdi,4),%xmm8,%xmm13
vpinsrd	$0,(%r10,%rdi,4),%xmm8,%xmm12
vmovdqa	%xmm8,%xmm3
vpsubd	%xmm5,%xmm13,%xmm11
vpmaxsd	%xmm8,%xmm11,%xmm5
vpaddd	%xmm4,%xmm5,%xmm4
vpminsd	%xmm12,%xmm4,%xmm8
vpsubd	%xmm14,%xmm8,%xmm14
vpmaxsd	%xmm3,%xmm14,%xmm10
vpaddd	%xmm15,%xmm10,%xmm15
vpminsd	%xmm12,%xmm15,%xmm1
vmovd	%xmm1,(%r9,%rdi,4)
jmp	.L366
.p2align 4,,10
.p2align 3
.L123:
vmovd	4(%rax),%xmm15
vmovd	(%rax),%xmm5
movslq	%edi,%rdi
vmovdqa	%xmm15,%xmm9
vmovdqa	%xmm5,%xmm7
cmpq	%rcx,%rdi
jge	.L366
movq	%rcx,%rsi
movq	%rdi,%r11
subq	%rdi,%rsi
leaq	-1(%rsi),%r10
cmpq	$6,%r10
jbe	.L165
movq	%rsi,%r12
leaq	b(%rip),%r10
vpxor	%xmm1,%xmm1,%xmm1
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm5,%ymm4
vpbroadcastd	%xmm15,%ymm0
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	a(%rip),%r9
leaq	-4(%r12),%r8
leaq	(%r10,%rdx),%rbx
addq	%r9,%rdx
shrq	$2,%r8
addq	$1,%r8
andl	$7,%r8d
je	.L135
cmpq	$1,%r8
je	.L284
cmpq	$2,%r8
je	.L285
cmpq	$3,%r8
je	.L286
cmpq	$4,%r8
je	.L287
cmpq	$5,%r8
je	.L288
cmpq	$6,%r8
je	.L289
vmovdqu	(%rdx),%ymm8
movl	$4,%eax
vpsubd	%ymm4,%ymm8,%ymm11
vpmaxsd	%ymm1,%ymm11,%ymm6
vpaddd	%ymm0,%ymm6,%ymm13
vpminsd	(%rbx),%ymm13,%ymm14
vmovdqu	%ymm14,(%rdx)
.L289:
vmovdqu	(%rdx,%rax,8),%ymm3
vpsubd	%ymm4,%ymm3,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm12
vpaddd	%ymm0,%ymm12,%ymm10
vpminsd	(%rbx,%rax,8),%ymm10,%ymm8
vmovdqu	%ymm8,(%rdx,%rax,8)
addq	$4,%rax
.L288:
vmovdqu	(%rdx,%rax,8),%ymm11
vpsubd	%ymm4,%ymm11,%ymm6
vpmaxsd	%ymm1,%ymm6,%ymm13
vpaddd	%ymm0,%ymm13,%ymm14
vpminsd	(%rbx,%rax,8),%ymm14,%ymm3
vmovdqu	%ymm3,(%rdx,%rax,8)
addq	$4,%rax
.L287:
vmovdqu	(%rdx,%rax,8),%ymm2
vpsubd	%ymm4,%ymm2,%ymm12
vpmaxsd	%ymm1,%ymm12,%ymm10
vpaddd	%ymm0,%ymm10,%ymm8
vpminsd	(%rbx,%rax,8),%ymm8,%ymm11
vmovdqu	%ymm11,(%rdx,%rax,8)
addq	$4,%rax
.L286:
vmovdqu	(%rdx,%rax,8),%ymm6
vpsubd	%ymm4,%ymm6,%ymm13
vpmaxsd	%ymm1,%ymm13,%ymm14
vpaddd	%ymm0,%ymm14,%ymm3
vpminsd	(%rbx,%rax,8),%ymm3,%ymm2
vmovdqu	%ymm2,(%rdx,%rax,8)
addq	$4,%rax
.L285:
vmovdqu	(%rdx,%rax,8),%ymm12
vpsubd	%ymm4,%ymm12,%ymm10
vpmaxsd	%ymm1,%ymm10,%ymm8
vpaddd	%ymm0,%ymm8,%ymm11
vpminsd	(%rbx,%rax,8),%ymm11,%ymm6
vmovdqu	%ymm6,(%rdx,%rax,8)
addq	$4,%rax
.L284:
vmovdqu	(%rdx,%rax,8),%ymm13
vpsubd	%ymm4,%ymm13,%ymm14
vpmaxsd	%ymm1,%ymm14,%ymm3
vpaddd	%ymm0,%ymm3,%ymm2
vpminsd	(%rbx,%rax,8),%ymm2,%ymm12
vmovdqu	%ymm12,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%r12,%rax
je	.L344
.L135:
vmovdqu	(%rdx,%rax,8),%ymm10
leaq	4(%rax),%r8
vpsubd	%ymm4,%ymm10,%ymm8
vpmaxsd	%ymm1,%ymm8,%ymm11
vpaddd	%ymm0,%ymm11,%ymm6
vpminsd	(%rbx,%rax,8),%ymm6,%ymm13
vmovdqu	%ymm13,(%rdx,%rax,8)
vmovdqu	(%rdx,%r8,8),%ymm14
vpsubd	%ymm4,%ymm14,%ymm3
vpmaxsd	%ymm1,%ymm3,%ymm2
vpaddd	%ymm0,%ymm2,%ymm12
vpminsd	(%rbx,%r8,8),%ymm12,%ymm10
vmovdqu	%ymm10,(%rdx,%r8,8)
leaq	8(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm8
vpsubd	%ymm4,%ymm8,%ymm11
vpmaxsd	%ymm1,%ymm11,%ymm6
vpaddd	%ymm0,%ymm6,%ymm13
vpminsd	(%rbx,%r8,8),%ymm13,%ymm14
vmovdqu	%ymm14,(%rdx,%r8,8)
leaq	12(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm3
vpsubd	%ymm4,%ymm3,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm12
vpaddd	%ymm0,%ymm12,%ymm10
vpminsd	(%rbx,%r8,8),%ymm10,%ymm8
vmovdqu	%ymm8,(%rdx,%r8,8)
leaq	16(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm11
vpsubd	%ymm4,%ymm11,%ymm6
vpmaxsd	%ymm1,%ymm6,%ymm13
vpaddd	%ymm0,%ymm13,%ymm14
vpminsd	(%rbx,%r8,8),%ymm14,%ymm3
vmovdqu	%ymm3,(%rdx,%r8,8)
leaq	20(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm2
vpsubd	%ymm4,%ymm2,%ymm12
vpmaxsd	%ymm1,%ymm12,%ymm10
vpaddd	%ymm0,%ymm10,%ymm8
vpminsd	(%rbx,%r8,8),%ymm8,%ymm11
vmovdqu	%ymm11,(%rdx,%r8,8)
leaq	24(%rax),%r8
vmovdqu	(%rdx,%r8,8),%ymm6
vpsubd	%ymm4,%ymm6,%ymm13
vpmaxsd	%ymm1,%ymm13,%ymm14
vpaddd	%ymm0,%ymm14,%ymm3
vpminsd	(%rbx,%r8,8),%ymm3,%ymm2
vmovdqu	%ymm2,(%rdx,%r8,8)
leaq	28(%rax),%r8
addq	$32,%rax
vmovdqu	(%rdx,%r8,8),%ymm12
vpsubd	%ymm4,%ymm12,%ymm10
vpmaxsd	%ymm1,%ymm10,%ymm8
vpaddd	%ymm0,%ymm8,%ymm11
vpminsd	(%rbx,%r8,8),%ymm11,%ymm6
vmovdqu	%ymm6,(%rdx,%r8,8)
cmpq	%r12,%rax
jne	.L135
.L344:
movq	%rsi,%rbx
andq	$-8,%rbx
addq	%rbx,%rdi
testb	$7,%sil
je	.L372
vzeroupper
.L134:
subq	%rbx,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L138
addq	%r11,%rbx
vpshufd	$0,%xmm7,%xmm7
vpxor	%xmm1,%xmm1,%xmm1
movq	%rsi,%r12
leaq	(%r9,%rbx,4),%r11
vpshufd	$0,%xmm9,%xmm9
andq	$-4,%r12
vmovdqu	(%r11),%xmm4
addq	%r12,%rdi
andl	$3,%esi
vpsubd	%xmm7,%xmm4,%xmm0
vpmaxsd	%xmm1,%xmm0,%xmm13
vpaddd	%xmm9,%xmm13,%xmm14
vpminsd	(%r10,%rbx,4),%xmm14,%xmm3
vmovdqu	%xmm3,(%r11)
je	.L366
.L138:
vmovd	(%r9,%rdi,4),%xmm2
vpxor	%xmm10,%xmm10,%xmm10
leaq	1(%rdi),%rsi
vpsubd	%xmm5,%xmm2,%xmm12
vpinsrd	$0,(%r10,%rdi,4),%xmm10,%xmm6
vpmaxsd	%xmm10,%xmm12,%xmm8
vpaddd	%xmm15,%xmm8,%xmm11
vpminsd	%xmm6,%xmm11,%xmm7
vmovd	%xmm7,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L366
vmovd	(%r9,%rsi,4),%xmm4
vpxor	%xmm1,%xmm1,%xmm1
addq	$2,%rdi
vpsubd	%xmm5,%xmm4,%xmm0
vpinsrd	$0,(%r10,%rsi,4),%xmm1,%xmm14
vpmaxsd	%xmm1,%xmm0,%xmm13
vpaddd	%xmm15,%xmm13,%xmm9
vpminsd	%xmm14,%xmm9,%xmm3
vmovd	%xmm3,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L366
vmovd	(%r9,%rdi,4),%xmm2
vpxor	%xmm12,%xmm12,%xmm12
vpinsrd	$0,(%r10,%rdi,4),%xmm12,%xmm8
vpsubd	%xmm5,%xmm2,%xmm5
vpmaxsd	%xmm12,%xmm5,%xmm10
vpaddd	%xmm15,%xmm10,%xmm15
vpminsd	%xmm8,%xmm15,%xmm11
vmovd	%xmm11,(%r9,%rdi,4)
jmp	.L366
.p2align 4,,10
.p2align 3
.L376:
vmovdqu	(%rdx),%ymm0
movl	$4,%eax
vpsubd	%ymm13,%ymm0,%ymm2
vpmaxsd	%ymm1,%ymm2,%ymm9
vpaddd	%ymm12,%ymm9,%ymm0
vpminsd	(%rbx),%ymm0,%ymm2
vpsubd	%ymm11,%ymm2,%ymm9
vpmaxsd	%ymm1,%ymm9,%ymm0
vpaddd	%ymm10,%ymm0,%ymm2
vpminsd	(%rbx),%ymm2,%ymm9
vmovdqu	%ymm9,(%rdx)
jmp	.L293
.p2align 4,,10
.p2align 3
.L375:
vmovdqu	(%rdx),%ymm0
movl	$4,%eax
vpsubd	%ymm5,%ymm0,%ymm14
vpmaxsd	%ymm8,%ymm14,%ymm3
vpaddd	%ymm9,%ymm3,%ymm10
vpminsd	(%rbx),%ymm10,%ymm15
vpsubd	%ymm4,%ymm15,%ymm1
vpmaxsd	%ymm8,%ymm1,%ymm0
vmovdqu	%ymm0,(%rdx)
jmp	.L291
.p2align 4,,10
.p2align 3
.L374:
vmovdqu	(%rax),%ymm0
addq	$32,%rdx
addq	$32,%rax
addq	$4,%r9
vpsubd	%ymm8,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm7,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vpsubd	%ymm6,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm5,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vpsubd	%ymm4,%ymm0,%ymm0
vpmaxsd	%ymm1,%ymm0,%ymm0
vpaddd	%ymm3,%ymm0,%ymm0
vpminsd	-32(%rdx),%ymm0,%ymm0
vmovdqu	%ymm0,-32(%rax)
jmp	.L297
.p2align 4,,10
.p2align 3
.L373:
vmovdqu	(%rdx),%ymm1
movl	$4,%eax
vpsubd	%ymm10,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm9,%ymm1,%ymm1
vpminsd	(%rbx),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%rbx),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm0,%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx)
jmp	.L295
.L169:
xorl	%eax,%eax
leaq	a(%rip),%r10
leaq	b(%rip),%r11
jmp	.L158
.L168:
xorl	%ebx,%ebx
leaq	a(%rip),%r9
leaq	b(%rip),%r10
jmp	.L152
.L167:
xorl	%ebx,%ebx
leaq	a(%rip),%r9
leaq	b(%rip),%r10
jmp	.L146
.L166:
xorl	%ebx,%ebx
leaq	a(%rip),%r9
leaq	b(%rip),%r10
jmp	.L140
.L165:
xorl	%ebx,%ebx
leaq	a(%rip),%r9
leaq	b(%rip),%r10
jmp	.L134
.L164:
xorl	%r9d,%r9d
leaq	a(%rip),%r12
jmp	.L127
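# vals.size() > 6: report assert(false) at candies.cpp:63 and abort.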
.L117:
leaq	.LC0(%rip),%rcx
movl	$63,%edx
leaq	.LC1(%rip),%rsi
leaq	.LC2(%rip),%rdi
call	__assert_fail@PLT
.cfi_endproc
.LFE11140:
.size	_Z2upILb1EEviiSt6vectorIiSaIiEE,.-_Z2upILb1EEviiSt6vectorIiSaIiEE
.section	.rodata._Z2upILb0EEviiSt6vectorIiSaIiEE.str1.8,"aMS",@progbits,1
.align 8
.LC3:
.string	"void up(int,int,std::vector<int>) [with bool start_neg = false]"
.section	.text._Z2upILb0EEviiSt6vectorIiSaIiEE,"axG",@progbits,_Z2upILb0EEviiSt6vectorIiSaIiEE,comdat
.p2align 4
.weak	_Z2upILb0EEviiSt6vectorIiSaIiEE
.type	_Z2upILb0EEviiSt6vectorIiSaIiEE,@function
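# up<false>(int l, int r, std::vector<int> vals) -- the start_neg = false instantiation named in .LC3.
# Same structure as up<true> above, but the first value is added and clamped to b[i],
# i.e. a[i] = min(a[i] + vals[0], b[i]), a[i] = max(a[i] - vals[1], 0), alternating.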
_Z2upILb0EEviiSt6vectorIiSaIiEE:
.LFB11141:
.cfi_startproc
endbr64
pushq	%rbp
.cfi_def_cfa_offset 16
.cfi_offset 6,-16
movq	%rsp,%rbp
.cfi_def_cfa_register 6
pushq	%r12
pushq	%rbx
andq	$-32,%rsp
subq	$32,%rsp
.cfi_offset 12,-24
.cfi_offset 3,-32
movq	(%rdx),%rax
movq	8(%rdx),%rdx
subq	%rax,%rdx
movq	%rdx,%r8
sarq	$2,%r8
cmpq	$27,%rdx
ja	.L378
movslq	%esi,%rcx
leaq	.L380(%rip),%rsi
movslq	(%rsi,%r8,4),%rbx
addq	%rsi,%rbx
notrack jmp	*%rbx
.section	.rodata._Z2upILb0EEviiSt6vectorIiSaIiEE,"aG",@progbits,_Z2upILb0EEviiSt6vectorIiSaIiEE,comdat
.align 4
.align 4
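# Jump table indexed by vals.size() for up<false>; entry k applies k alternating updates, mirroring .L119 above.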
.L380:
.long	.L627-.L380
.long	.L385-.L380
.long	.L384-.L380
.long	.L383-.L380
.long	.L382-.L380
.long	.L381-.L380
.long	.L379-.L380
.section	.text._Z2upILb0EEviiSt6vectorIiSaIiEE,"axG",@progbits,_Z2upILb0EEviiSt6vectorIiSaIiEE,comdat
.p2align 4,,10
.p2align 3
.L633:
vzeroupper
.L627:
leaq	-16(%rbp),%rsp
popq	%rbx
popq	%r12
popq	%rbp
.cfi_remember_state
.cfi_def_cfa 7,8
ret
.p2align 4,,10
.p2align 3
.L381:
.cfi_restore_state
vmovd	16(%rax),%xmm10
vmovd	12(%rax),%xmm11
vmovd	(%rax),%xmm14
movslq	%edi,%rdi
vmovd	8(%rax),%xmm12
vmovd	4(%rax),%xmm13
vmovd	%xmm10,28(%rsp)
vmovdqa	%xmm11,%xmm15
vmovdqa	%xmm12,%xmm3
vmovdqa	%xmm13,%xmm4
vmovdqa	%xmm14,%xmm0
cmpq	%rcx,%rdi
jge	.L627
movq	%rcx,%rsi
movq	%rdi,%rbx
subq	%rdi,%rsi
leaq	-1(%rsi),%r9
cmpq	$6,%r9
jbe	.L429
movq	%rsi,%r12
vpbroadcastd	%xmm14,%ymm9
vpbroadcastd	%xmm13,%ymm8
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm12,%ymm7
vpxor	%xmm2,%xmm2,%xmm2
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	b(%rip),%r11
leaq	-4(%r12),%r8
leaq	(%r11,%rdx),%r10
vpbroadcastd	%xmm11,%ymm6
shrq	$2,%r8
leaq	a(%rip),%r9
vpbroadcastd	%xmm10,%ymm5
addq	$1,%r8
addq	%r9,%rdx
andl	$3,%r8d
je	.L620
cmpq	$1,%r8
je	.L555
cmpq	$2,%r8
jne	.L634
.L556:
vpaddd	(%rdx,%rax,8),%ymm9,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%rax,8)
addq	$4,%rax
.L555:
vpaddd	(%rdx,%rax,8),%ymm9,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%r12,%rax
je	.L608
.L620:
vmovd	%xmm15,24(%rsp)
.L414:
vmovdqu	(%r10,%rax,8),%ymm15
vpaddd	(%rdx,%rax,8),%ymm9,%ymm1
leaq	4(%rax),%r8
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vmovdqu	%ymm15,(%rdx,%rax,8)
vmovdqu	(%r10,%r8,8),%ymm15
vpaddd	(%rdx,%r8,8),%ymm9,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vmovdqu	%ymm15,(%rdx,%r8,8)
leaq	8(%rax),%r8
vmovdqu	(%r10,%r8,8),%ymm15
vpaddd	(%rdx,%r8,8),%ymm9,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vmovdqu	%ymm15,(%rdx,%r8,8)
leaq	12(%rax),%r8
addq	$16,%rax
vmovdqu	(%r10,%r8,8),%ymm15
vpaddd	(%rdx,%r8,8),%ymm9,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vmovdqu	%ymm15,(%rdx,%r8,8)
cmpq	%r12,%rax
jne	.L414
vmovd	24(%rsp),%xmm15
.L608:
movq	%rsi,%r10
andq	$-8,%r10
addq	%r10,%rdi
testb	$7,%sil
je	.L633
vzeroupper
.L413:
subq	%r10,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L417
addq	%rbx,%r10
vpshufd	$0,%xmm0,%xmm0
vpshufd	$0,%xmm4,%xmm4
movq	%rsi,%r12
leaq	(%r9,%r10,4),%rbx
vmovdqu	(%r11,%r10,4),%xmm9
vpxor	%xmm5,%xmm5,%xmm5
vpshufd	$0,%xmm3,%xmm3
vpaddd	(%rbx),%xmm0,%xmm8
vpshufd	$0,%xmm15,%xmm15
andq	$-4,%r12
addq	%r12,%rdi
andl	$3,%esi
vpminsd	%xmm9,%xmm8,%xmm7
vpsubd	%xmm4,%xmm7,%xmm6
vmovd	28(%rsp),%xmm4
vpmaxsd	%xmm5,%xmm6,%xmm2
vpaddd	%xmm3,%xmm2,%xmm1
vpshufd	$0,%xmm4,%xmm6
vpminsd	%xmm9,%xmm1,%xmm0
vpsubd	%xmm15,%xmm0,%xmm8
vpmaxsd	%xmm5,%xmm8,%xmm7
vpaddd	%xmm6,%xmm7,%xmm5
vpminsd	%xmm9,%xmm5,%xmm9
vmovdqu	%xmm9,(%rbx)
je	.L627
.L417:
vmovd	(%r9,%rdi,4),%xmm2
vmovd	(%r11,%rdi,4),%xmm3
vpxor	%xmm8,%xmm8,%xmm8
vpaddd	%xmm2,%xmm14,%xmm1
leaq	1(%rdi),%rsi
vpminsd	%xmm3,%xmm1,%xmm0
vpsubd	%xmm13,%xmm0,%xmm15
vpmaxsd	%xmm8,%xmm15,%xmm7
vpaddd	%xmm12,%xmm7,%xmm4
vpminsd	%xmm3,%xmm4,%xmm6
vpsubd	%xmm11,%xmm6,%xmm5
vpmaxsd	%xmm8,%xmm5,%xmm9
vpaddd	%xmm10,%xmm9,%xmm2
vpminsd	%xmm3,%xmm2,%xmm3
vmovd	%xmm3,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L627
vpinsrd	$0,(%r9,%rsi,4),%xmm8,%xmm0
vpinsrd	$0,(%r11,%rsi,4),%xmm8,%xmm1
addq	$2,%rdi
vpaddd	%xmm0,%xmm14,%xmm15
vpminsd	%xmm1,%xmm15,%xmm7
vpsubd	%xmm13,%xmm7,%xmm4
vpmaxsd	%xmm8,%xmm4,%xmm6
vpaddd	%xmm12,%xmm6,%xmm5
vpminsd	%xmm1,%xmm5,%xmm9
vpsubd	%xmm11,%xmm9,%xmm2
vpmaxsd	%xmm8,%xmm2,%xmm3
vpaddd	%xmm10,%xmm3,%xmm0
vpminsd	%xmm1,%xmm0,%xmm1
vmovd	%xmm1,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L627
vpinsrd	$0,(%r9,%rdi,4),%xmm8,%xmm7
vpinsrd	$0,(%r11,%rdi,4),%xmm8,%xmm15
vpaddd	%xmm7,%xmm14,%xmm14
vpminsd	%xmm15,%xmm14,%xmm4
vpsubd	%xmm13,%xmm4,%xmm13
vpmaxsd	%xmm8,%xmm13,%xmm6
vpaddd	%xmm12,%xmm6,%xmm12
vpminsd	%xmm15,%xmm12,%xmm5
vpsubd	%xmm11,%xmm5,%xmm11
vpmaxsd	%xmm8,%xmm11,%xmm8
vpaddd	%xmm10,%xmm8,%xmm10
vpminsd	%xmm15,%xmm10,%xmm9
vmovd	%xmm9,(%r9,%rdi,4)
jmp	.L627
.p2align 4,,10
.p2align 3
.L379:
vmovd	20(%rax),%xmm9
vmovd	16(%rax),%xmm10
vmovd	12(%rax),%xmm11
movslq	%edi,%rdi
vmovd	8(%rax),%xmm12
vmovd	4(%rax),%xmm13
vmovd	(%rax),%xmm14
vmovd	%xmm10,28(%rsp)
vmovdqa	%xmm9,%xmm15
vmovd	%xmm11,24(%rsp)
vmovdqa	%xmm14,%xmm0
vmovd	%xmm12,20(%rsp)
vmovd	%xmm13,16(%rsp)
cmpq	%rcx,%rdi
jge	.L627
movq	%rcx,%rsi
movq	%rdi,%r12
subq	%rdi,%rsi
leaq	-1(%rsi),%r11
cmpq	$6,%r11
jbe	.L430
movq	%rsi,%r9
vpbroadcastd	%xmm14,%ymm8
vpxor	%xmm2,%xmm2,%xmm2
shrq	$3,%r9
leaq	b(%rip),%r11
leaq	0(,%rdi,4),%rax
addq	%rdi,%r9
leaq	(%r11,%rax),%rdx
vpbroadcastd	%xmm13,%ymm7
leaq	(%r11,%r9,4),%rbx
leaq	a(%rip),%r10
vpbroadcastd	%xmm12,%ymm6
movq	%rdx,%r9
movq	%rbx,%r8
vpbroadcastd	%xmm11,%ymm5
vpbroadcastd	%xmm10,%ymm4
addq	%r10,%rax
subq	%rdx,%r8
vpbroadcastd	%xmm9,%ymm3
subq	$4,%r8
shrq	$2,%r8
addq	$1,%r8
andl	$3,%r8d
je	.L615
cmpq	$1,%r8
je	.L557
cmpq	$2,%r8
jne	.L635
.L558:
vpaddd	(%rax),%ymm8,%ymm1
addq	$32,%rdx
addq	$32,%rax
addq	$4,%r9
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm3,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vmovdqu	%ymm1,-32(%rax)
.L557:
vpaddd	(%rax),%ymm8,%ymm1
addq	$4,%r9
addq	$32,%rdx
addq	$32,%rax
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm3,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vmovdqu	%ymm1,-32(%rax)
cmpq	%r9,%rbx
je	.L603
.L615:
vmovd	%xmm15,12(%rsp)
.L420:
vmovdqu	(%rdx),%ymm15
vpaddd	(%rax),%ymm8,%ymm1
addq	$16,%r9
subq	$-128,%rdx
subq	$-128,%rax
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vpsubd	%ymm3,%ymm15,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm15
vpaddd	-96(%rax),%ymm8,%ymm1
vmovdqu	%ymm15,-128(%rax)
vmovdqu	-96(%rdx),%ymm15
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vpsubd	%ymm3,%ymm15,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm15
vpaddd	-64(%rax),%ymm8,%ymm1
vmovdqu	%ymm15,-96(%rax)
vmovdqu	-64(%rdx),%ymm15
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vpsubd	%ymm3,%ymm15,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm15
vpaddd	-32(%rax),%ymm8,%ymm1
vmovdqu	%ymm15,-64(%rax)
vmovdqu	-32(%rdx),%ymm15
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	%ymm15,%ymm1,%ymm15
vpsubd	%ymm3,%ymm15,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm15
vmovdqu	%ymm15,-32(%rax)
cmpq	%r9,%rbx
jne	.L420
vmovd	12(%rsp),%xmm15
.L603:
movq	%rsi,%rax
andq	$-8,%rax
addq	%rax,%rdi
testb	$7,%sil
je	.L633
vzeroupper
.L419:
subq	%rax,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L423
addq	%r12,%rax
vmovd	16(%rsp),%xmm5
vpxor	%xmm1,%xmm1,%xmm1
movq	%rsi,%rbx
leaq	(%r10,%rax,4),%r12
vmovdqu	(%r11,%rax,4),%xmm8
vpshufd	$0,%xmm0,%xmm0
vpshufd	$0,%xmm15,%xmm15
vpaddd	(%r12),%xmm0,%xmm7
vpshufd	$0,%xmm5,%xmm4
vmovd	20(%rsp),%xmm0
andq	$-4,%rbx
vmovd	24(%rsp),%xmm5
addq	%rbx,%rdi
andl	$3,%esi
vpminsd	%xmm8,%xmm7,%xmm6
vpshufd	$0,%xmm0,%xmm7
vpsubd	%xmm4,%xmm6,%xmm3
vpmaxsd	%xmm1,%xmm3,%xmm2
vpshufd	$0,%xmm5,%xmm3
vpaddd	%xmm7,%xmm2,%xmm6
vmovd	28(%rsp),%xmm7
vpminsd	%xmm8,%xmm6,%xmm4
vpsubd	%xmm3,%xmm4,%xmm2
vpshufd	$0,%xmm7,%xmm6
vpmaxsd	%xmm1,%xmm2,%xmm0
vpaddd	%xmm6,%xmm0,%xmm4
vpminsd	%xmm8,%xmm4,%xmm8
vpsubd	%xmm15,%xmm8,%xmm5
vpmaxsd	%xmm1,%xmm5,%xmm1
vmovdqu	%xmm1,(%r12)
je	.L627
.L423:
vmovd	(%r10,%rdi,4),%xmm2
vmovd	(%r11,%rdi,4),%xmm3
vpxor	%xmm4,%xmm4,%xmm4
vpaddd	%xmm2,%xmm14,%xmm0
leaq	1(%rdi),%rsi
vpminsd	%xmm3,%xmm0,%xmm7
vpsubd	%xmm13,%xmm7,%xmm6
vpmaxsd	%xmm4,%xmm6,%xmm8
vpaddd	%xmm12,%xmm8,%xmm15
vpminsd	%xmm3,%xmm15,%xmm5
vpsubd	%xmm11,%xmm5,%xmm1
vpmaxsd	%xmm4,%xmm1,%xmm2
vpaddd	%xmm10,%xmm2,%xmm0
vpminsd	%xmm3,%xmm0,%xmm3
vpsubd	%xmm9,%xmm3,%xmm7
vpmaxsd	%xmm4,%xmm7,%xmm8
vmovd	%xmm8,(%r10,%rdi,4)
cmpq	%rcx,%rsi
jge	.L627
vpinsrd	$0,(%r10,%rsi,4),%xmm4,%xmm5
vpinsrd	$0,(%r11,%rsi,4),%xmm4,%xmm15
addq	$2,%rdi
vpaddd	%xmm5,%xmm14,%xmm1
vpminsd	%xmm15,%xmm1,%xmm2
vpsubd	%xmm13,%xmm2,%xmm0
vpmaxsd	%xmm4,%xmm0,%xmm3
vpaddd	%xmm12,%xmm3,%xmm7
vpminsd	%xmm15,%xmm7,%xmm6
vpsubd	%xmm11,%xmm6,%xmm8
vpmaxsd	%xmm4,%xmm8,%xmm5
vpaddd	%xmm10,%xmm5,%xmm1
vpminsd	%xmm15,%xmm1,%xmm15
vpsubd	%xmm9,%xmm15,%xmm2
vpmaxsd	%xmm4,%xmm2,%xmm0
vmovd	%xmm0,(%r10,%rsi,4)
cmpq	%rdi,%rcx
jle	.L627
vpinsrd	$0,(%r10,%rdi,4),%xmm4,%xmm6
vpinsrd	$0,(%r11,%rdi,4),%xmm4,%xmm7
vpaddd	%xmm6,%xmm14,%xmm14
vpminsd	%xmm7,%xmm14,%xmm8
vpsubd	%xmm13,%xmm8,%xmm13
vpmaxsd	%xmm4,%xmm13,%xmm5
vpaddd	%xmm12,%xmm5,%xmm12
vpminsd	%xmm7,%xmm12,%xmm1
vpsubd	%xmm11,%xmm1,%xmm11
vpmaxsd	%xmm4,%xmm11,%xmm15
vpaddd	%xmm10,%xmm15,%xmm10
vpminsd	%xmm7,%xmm10,%xmm2
vpsubd	%xmm9,%xmm2,%xmm9
vpmaxsd	%xmm4,%xmm9,%xmm4
vmovd	%xmm4,(%r10,%rdi,4)
jmp	.L627
.p2align 4,,10
.p2align 3
.L385:
vmovd	(%rax),%xmm13
movslq	%edi,%rdi
vmovdqa	%xmm13,%xmm5
cmpq	%rcx,%rdi
jge	.L627
movq	%rcx,%r8
movq	%rdi,%rbx
subq	%rdi,%r8
leaq	-1(%r8),%r11
cmpq	$6,%r11
jbe	.L425
movq	%r8,%r12
leaq	b(%rip),%r11
xorl	%eax,%eax
shrq	$3,%r12
leaq	0(,%rdi,4),%rdx
vpbroadcastd	%xmm13,%ymm0
salq	$2,%r12
leaq	a(%rip),%r10
leaq	(%r11,%rdx),%r9
leaq	-4(%r12),%rsi
addq	%r10,%rdx
shrq	$2,%rsi
addq	$1,%rsi
andl	$7,%esi
je	.L389
cmpq	$1,%rsi
je	.L539
cmpq	$2,%rsi
je	.L540
cmpq	$3,%rsi
je	.L541
cmpq	$4,%rsi
je	.L542
cmpq	$5,%rsi
je	.L543
cmpq	$6,%rsi
je	.L544
vpaddd	(%rdx),%ymm0,%ymm8
vpminsd	(%r9),%ymm8,%ymm2
movl	$4,%eax
vmovdqu	%ymm2,(%rdx)
.L544:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm9
vpminsd	(%r9,%rax,8),%ymm9,%ymm6
vmovdqu	%ymm6,(%rdx,%rax,8)
addq	$4,%rax
.L543:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm1
vpminsd	(%r9,%rax,8),%ymm1,%ymm14
vmovdqu	%ymm14,(%rdx,%rax,8)
addq	$4,%rax
.L542:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm3
vpminsd	(%r9,%rax,8),%ymm3,%ymm10
vmovdqu	%ymm10,(%rdx,%rax,8)
addq	$4,%rax
.L541:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm7
vpminsd	(%r9,%rax,8),%ymm7,%ymm12
vmovdqu	%ymm12,(%rdx,%rax,8)
addq	$4,%rax
.L540:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm15
vpminsd	(%r9,%rax,8),%ymm15,%ymm11
vmovdqu	%ymm11,(%rdx,%rax,8)
addq	$4,%rax
.L539:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm4
vpminsd	(%r9,%rax,8),%ymm4,%ymm8
vmovdqu	%ymm8,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%rax,%r12
je	.L604
.L389:
vpaddd	(%rdx,%rax,8),%ymm0,%ymm2
vpminsd	(%r9,%rax,8),%ymm2,%ymm9
vpaddd	32(%rdx,%rax,8),%ymm0,%ymm6
vpaddd	64(%rdx,%rax,8),%ymm0,%ymm14
vmovdqu	%ymm9,(%rdx,%rax,8)
vpminsd	32(%r9,%rax,8),%ymm6,%ymm1
vpaddd	96(%rdx,%rax,8),%ymm0,%ymm10
vpaddd	128(%rdx,%rax,8),%ymm0,%ymm12
vpaddd	160(%rdx,%rax,8),%ymm0,%ymm11
vpaddd	192(%rdx,%rax,8),%ymm0,%ymm8
vmovdqu	%ymm1,32(%rdx,%rax,8)
vpminsd	64(%r9,%rax,8),%ymm14,%ymm3
vpaddd	224(%rdx,%rax,8),%ymm0,%ymm9
vmovdqu	%ymm3,64(%rdx,%rax,8)
vpminsd	96(%r9,%rax,8),%ymm10,%ymm7
vmovdqu	%ymm7,96(%rdx,%rax,8)
vpminsd	128(%r9,%rax,8),%ymm12,%ymm15
vmovdqu	%ymm15,128(%rdx,%rax,8)
vpminsd	160(%r9,%rax,8),%ymm11,%ymm4
vmovdqu	%ymm4,160(%rdx,%rax,8)
vpminsd	192(%r9,%rax,8),%ymm8,%ymm2
vmovdqu	%ymm2,192(%rdx,%rax,8)
vpminsd	224(%r9,%rax,8),%ymm9,%ymm6
vmovdqu	%ymm6,224(%rdx,%rax,8)
addq	$32,%rax
cmpq	%rax,%r12
jne	.L389
.L604:
movq	%r8,%r9
andq	$-8,%r9
addq	%r9,%rdi
testb	$7,%r8b
je	.L633
vzeroupper
.L388:
subq	%r9,%r8
leaq	-1(%r8),%rdx
cmpq	$2,%rdx
jbe	.L393
addq	%rbx,%r9
movq	%r8,%r12
vpshufd	$0,%xmm5,%xmm5
leaq	(%r10,%r9,4),%rbx
andq	$-4,%r12
vpaddd	(%rbx),%xmm5,%xmm0
vpminsd	(%r11,%r9,4),%xmm0,%xmm1
addq	%r12,%rdi
andl	$3,%r8d
vmovdqu	%xmm1,(%rbx)
je	.L627
.L393:
vmovd	(%r10,%rdi,4),%xmm14
vmovd	(%r11,%rdi,4),%xmm10
leaq	1(%rdi),%r8
vpaddd	%xmm14,%xmm13,%xmm3
vpminsd	%xmm10,%xmm3,%xmm7
vmovd	%xmm7,(%r10,%rdi,4)
cmpq	%rcx,%r8
jge	.L627
vmovd	(%r10,%r8,4),%xmm12
vmovd	(%r11,%r8,4),%xmm11
addq	$2,%rdi
vpaddd	%xmm12,%xmm13,%xmm15
vpminsd	%xmm11,%xmm15,%xmm4
vmovd	%xmm4,(%r10,%r8,4)
cmpq	%rdi,%rcx
jle	.L627
vmovd	(%r10,%rdi,4),%xmm8
vmovd	(%r11,%rdi,4),%xmm2
vpaddd	%xmm8,%xmm13,%xmm13
vpminsd	%xmm2,%xmm13,%xmm9
vmovd	%xmm9,(%r10,%rdi,4)
jmp	.L627
.p2align 4,,10
.p2align 3
.L383:
vmovd	8(%rax),%xmm14
vmovd	4(%rax),%xmm13
vmovd	(%rax),%xmm11
movslq	%edi,%rdi
vmovdqa	%xmm14,%xmm15
vmovdqa	%xmm13,%xmm12
vmovdqa	%xmm11,%xmm0
cmpq	%rcx,%rdi
jge	.L627
movq	%rcx,%rsi
movq	%rdi,%rbx
subq	%rdi,%rsi
leaq	-1(%rsi),%r11
cmpq	$6,%r11
jbe	.L427
movq	%rsi,%r12
vpbroadcastd	%xmm11,%ymm10
vpbroadcastd	%xmm13,%ymm5
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm14,%ymm6
vpxor	%xmm8,%xmm8,%xmm8
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	b(%rip),%r11
leaq	-4(%r12),%r8
leaq	a(%rip),%r9
shrq	$2,%r8
leaq	(%r11,%rdx),%r10
addq	%r9,%rdx
addq	$1,%r8
andl	$3,%r8d
je	.L402
cmpq	$1,%r8
je	.L551
cmpq	$2,%r8
jne	.L636
.L552:
vpaddd	(%rdx,%rax,8),%ymm10,%ymm4
vpminsd	(%r10,%rax,8),%ymm4,%ymm3
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx,%rax,8)
addq	$4,%rax
.L551:
vpaddd	(%rdx,%rax,8),%ymm10,%ymm4
vpminsd	(%r10,%rax,8),%ymm4,%ymm3
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%r12,%rax
je	.L606
.L402:
vpaddd	(%rdx,%rax,8),%ymm10,%ymm4
vpminsd	(%r10,%rax,8),%ymm4,%ymm3
leaq	4(%rax),%r8
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx,%rax,8)
vpaddd	(%rdx,%r8,8),%ymm10,%ymm4
vpminsd	(%r10,%r8,8),%ymm4,%ymm3
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10,%r8,8),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx,%r8,8)
leaq	8(%rax),%r8
vpaddd	(%rdx,%r8,8),%ymm10,%ymm4
vpminsd	(%r10,%r8,8),%ymm4,%ymm3
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10,%r8,8),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx,%r8,8)
leaq	12(%rax),%r8
addq	$16,%rax
vpaddd	(%rdx,%r8,8),%ymm10,%ymm4
vpminsd	(%r10,%r8,8),%ymm4,%ymm3
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10,%r8,8),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx,%r8,8)
cmpq	%r12,%rax
jne	.L402
.L606:
movq	%rsi,%r10
andq	$-8,%r10
addq	%r10,%rdi
testb	$7,%sil
je	.L633
vzeroupper
.L401:
subq	%r10,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L405
addq	%rbx,%r10
vpshufd	$0,%xmm0,%xmm0
vpshufd	$0,%xmm12,%xmm12
movq	%rsi,%r12
leaq	(%r9,%r10,4),%rbx
vmovdqu	(%r11,%r10,4),%xmm10
vpxor	%xmm6,%xmm6,%xmm6
vpshufd	$0,%xmm15,%xmm15
vpaddd	(%rbx),%xmm0,%xmm5
andq	$-4,%r12
addq	%r12,%rdi
andl	$3,%esi
vpminsd	%xmm10,%xmm5,%xmm8
vpsubd	%xmm12,%xmm8,%xmm4
vpmaxsd	%xmm6,%xmm4,%xmm3
vpaddd	%xmm15,%xmm3,%xmm9
vpminsd	%xmm10,%xmm9,%xmm2
vmovdqu	%xmm2,(%rbx)
je	.L627
.L405:
vmovd	(%r9,%rdi,4),%xmm7
vmovd	(%r11,%rdi,4),%xmm1
vpxor	%xmm5,%xmm5,%xmm5
vpaddd	%xmm7,%xmm11,%xmm10
leaq	1(%rdi),%rsi
vpminsd	%xmm1,%xmm10,%xmm0
vpsubd	%xmm13,%xmm0,%xmm8
vpmaxsd	%xmm5,%xmm8,%xmm12
vpaddd	%xmm14,%xmm12,%xmm4
vpminsd	%xmm1,%xmm4,%xmm6
vmovd	%xmm6,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L627
vpinsrd	$0,(%r9,%rsi,4),%xmm5,%xmm15
vpinsrd	$0,(%r11,%rsi,4),%xmm5,%xmm3
addq	$2,%rdi
vpaddd	%xmm15,%xmm11,%xmm9
vpminsd	%xmm3,%xmm9,%xmm2
vpsubd	%xmm13,%xmm2,%xmm1
vpmaxsd	%xmm5,%xmm1,%xmm7
vpaddd	%xmm14,%xmm7,%xmm10
vpminsd	%xmm3,%xmm10,%xmm0
vmovd	%xmm0,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L627
vpinsrd	$0,(%r9,%rdi,4),%xmm5,%xmm12
vpinsrd	$0,(%r11,%rdi,4),%xmm5,%xmm8
vpaddd	%xmm12,%xmm11,%xmm11
vpminsd	%xmm8,%xmm11,%xmm4
vpsubd	%xmm13,%xmm4,%xmm13
vpmaxsd	%xmm5,%xmm13,%xmm6
vpaddd	%xmm14,%xmm6,%xmm14
vpminsd	%xmm8,%xmm14,%xmm3
vmovd	%xmm3,(%r9,%rdi,4)
jmp	.L627
.p2align 4,,10
.p2align 3
.L382:
vmovd	12(%rax),%xmm2
vmovd	8(%rax),%xmm3
vmovd	(%rax),%xmm5
movslq	%edi,%rdi
vmovd	4(%rax),%xmm4
vmovdqa	%xmm2,%xmm15
vmovdqa	%xmm3,%xmm14
vmovdqa	%xmm4,%xmm6
vmovdqa	%xmm5,%xmm0
cmpq	%rcx,%rdi
jge	.L627
movq	%rcx,%rsi
movq	%rdi,%rbx
subq	%rdi,%rsi
leaq	-1(%rsi),%r11
cmpq	$6,%r11
jbe	.L428
movq	%rsi,%r12
vpbroadcastd	%xmm5,%ymm13
vpbroadcastd	%xmm4,%ymm12
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm3,%ymm11
vpxor	%xmm7,%xmm7,%xmm7
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	b(%rip),%r11
leaq	-4(%r12),%r8
leaq	(%r11,%rdx),%r10
vpbroadcastd	%xmm2,%ymm8
shrq	$2,%r8
leaq	a(%rip),%r9
addq	$1,%r8
addq	%r9,%rdx
andl	$3,%r8d
je	.L408
cmpq	$1,%r8
je	.L553
cmpq	$2,%r8
jne	.L637
.L554:
vpaddd	(%rdx,%rax,8),%ymm13,%ymm9
vpminsd	(%r10,%rax,8),%ymm9,%ymm1
vpsubd	%ymm12,%ymm1,%ymm10
vpmaxsd	%ymm7,%ymm10,%ymm9
vpaddd	%ymm11,%ymm9,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm10
vpsubd	%ymm8,%ymm10,%ymm9
vpmaxsd	%ymm7,%ymm9,%ymm1
vmovdqu	%ymm1,(%rdx,%rax,8)
addq	$4,%rax
.L553:
vpaddd	(%rdx,%rax,8),%ymm13,%ymm10
vpminsd	(%r10,%rax,8),%ymm10,%ymm9
vpsubd	%ymm12,%ymm9,%ymm1
vpmaxsd	%ymm7,%ymm1,%ymm10
vpaddd	%ymm11,%ymm10,%ymm9
vpminsd	(%r10,%rax,8),%ymm9,%ymm1
vpsubd	%ymm8,%ymm1,%ymm10
vpmaxsd	%ymm7,%ymm10,%ymm9
vmovdqu	%ymm9,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%rax,%r12
je	.L607
.L408:
vpaddd	(%rdx,%rax,8),%ymm13,%ymm1
vpminsd	(%r10,%rax,8),%ymm1,%ymm10
leaq	4(%rax),%r8
vpsubd	%ymm12,%ymm10,%ymm9
vpmaxsd	%ymm7,%ymm9,%ymm1
vpaddd	%ymm11,%ymm1,%ymm10
vpminsd	(%r10,%rax,8),%ymm10,%ymm9
vpsubd	%ymm8,%ymm9,%ymm1
vpmaxsd	%ymm7,%ymm1,%ymm10
vmovdqu	%ymm10,(%rdx,%rax,8)
vpaddd	(%rdx,%r8,8),%ymm13,%ymm9
vpminsd	(%r10,%r8,8),%ymm9,%ymm1
vpsubd	%ymm12,%ymm1,%ymm10
vpmaxsd	%ymm7,%ymm10,%ymm9
vpaddd	%ymm11,%ymm9,%ymm1
vpminsd	(%r10,%r8,8),%ymm1,%ymm10
vpsubd	%ymm8,%ymm10,%ymm9
vpmaxsd	%ymm7,%ymm9,%ymm1
vmovdqu	%ymm1,(%rdx,%r8,8)
leaq	8(%rax),%r8
vpaddd	(%rdx,%r8,8),%ymm13,%ymm10
vpminsd	(%r10,%r8,8),%ymm10,%ymm9
vpsubd	%ymm12,%ymm9,%ymm1
vpmaxsd	%ymm7,%ymm1,%ymm10
vpaddd	%ymm11,%ymm10,%ymm9
vpminsd	(%r10,%r8,8),%ymm9,%ymm1
vpsubd	%ymm8,%ymm1,%ymm10
vpmaxsd	%ymm7,%ymm10,%ymm9
vmovdqu	%ymm9,(%rdx,%r8,8)
leaq	12(%rax),%r8
addq	$16,%rax
vpaddd	(%rdx,%r8,8),%ymm13,%ymm1
vpminsd	(%r10,%r8,8),%ymm1,%ymm10
vpsubd	%ymm12,%ymm10,%ymm9
vpmaxsd	%ymm7,%ymm9,%ymm1
vpaddd	%ymm11,%ymm1,%ymm10
vpminsd	(%r10,%r8,8),%ymm10,%ymm9
vpsubd	%ymm8,%ymm9,%ymm1
vpmaxsd	%ymm7,%ymm1,%ymm10
vmovdqu	%ymm10,(%rdx,%r8,8)
cmpq	%rax,%r12
jne	.L408
.L607:
movq	%rsi,%r10
andq	$-8,%r10
addq	%r10,%rdi
testb	$7,%sil
je	.L633
vzeroupper
.L407:
subq	%r10,%rsi
leaq	-1(%rsi),%rdx
cmpq	$2,%rdx
jbe	.L411
addq	%rbx,%r10
vpshufd	$0,%xmm0,%xmm0
vpshufd	$0,%xmm6,%xmm6
movq	%rsi,%r12
leaq	(%r9,%r10,4),%rbx
vmovdqu	(%r11,%r10,4),%xmm13
vpxor	%xmm7,%xmm7,%xmm7
vpshufd	$0,%xmm14,%xmm14
vpaddd	(%rbx),%xmm0,%xmm12
vpshufd	$0,%xmm15,%xmm15
andq	$-4,%r12
addq	%r12,%rdi
andl	$3,%esi
vpminsd	%xmm13,%xmm12,%xmm11
vpsubd	%xmm6,%xmm11,%xmm8
vpmaxsd	%xmm7,%xmm8,%xmm9
vpaddd	%xmm14,%xmm9,%xmm1
vpminsd	%xmm13,%xmm1,%xmm10
vpsubd	%xmm15,%xmm10,%xmm13
vpmaxsd	%xmm7,%xmm13,%xmm0
vmovdqu	%xmm0,(%rbx)
je	.L627
.L411:
vmovd	(%r9,%rdi,4),%xmm11
vmovd	(%r11,%rdi,4),%xmm12
vpxor	%xmm9,%xmm9,%xmm9
vpaddd	%xmm11,%xmm5,%xmm6
leaq	1(%rdi),%rsi
vmovdqa	%xmm9,%xmm13
vpminsd	%xmm12,%xmm6,%xmm8
vpsubd	%xmm4,%xmm8,%xmm7
vpmaxsd	%xmm9,%xmm7,%xmm14
vpaddd	%xmm3,%xmm14,%xmm1
vpminsd	%xmm12,%xmm1,%xmm10
vpsubd	%xmm2,%xmm10,%xmm15
vpmaxsd	%xmm9,%xmm15,%xmm0
vmovd	%xmm0,(%r9,%rdi,4)
cmpq	%rsi,%rcx
jle	.L627
vpinsrd	$0,(%r9,%rsi,4),%xmm9,%xmm11
vpinsrd	$0,(%r11,%rsi,4),%xmm9,%xmm12
addq	$2,%rdi
vpaddd	%xmm11,%xmm5,%xmm6
vpminsd	%xmm12,%xmm6,%xmm8
vpsubd	%xmm4,%xmm8,%xmm7
vpmaxsd	%xmm9,%xmm7,%xmm14
vpaddd	%xmm3,%xmm14,%xmm1
vpminsd	%xmm12,%xmm1,%xmm10
vpsubd	%xmm2,%xmm10,%xmm15
vpmaxsd	%xmm9,%xmm15,%xmm0
vmovd	%xmm0,(%r9,%rsi,4)
cmpq	%rdi,%rcx
jle	.L627
vpinsrd	$0,(%r9,%rdi,4),%xmm9,%xmm11
vpinsrd	$0,(%r11,%rdi,4),%xmm9,%xmm12
vpaddd	%xmm11,%xmm5,%xmm5
vpminsd	%xmm12,%xmm5,%xmm6
vpsubd	%xmm4,%xmm6,%xmm8
vpmaxsd	%xmm9,%xmm8,%xmm4
vpaddd	%xmm3,%xmm4,%xmm3
vpminsd	%xmm12,%xmm3,%xmm9
vpsubd	%xmm2,%xmm9,%xmm2
vpmaxsd	%xmm13,%xmm2,%xmm7
vmovd	%xmm7,(%r9,%rdi,4)
jmp	.L627
.p2align 4,,10
.p2align 3
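# .L384: two-delta path: loads d0 and d1 and computes a[i] = max(0, min(b[i], a[i] + d0) - d1),
# again 8 ints at a time with AVX2, with the SSE/scalar tail starting at .L395.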
.L384:
vmovd	4(%rax),%xmm15
vmovd	(%rax),%xmm10
movslq	%edi,%rdi
vmovdqa	%xmm15,%xmm9
vmovdqa	%xmm10,%xmm0
cmpq	%rcx,%rdi
jge	.L627
movq	%rcx,%r8
movq	%rdi,%rbx
subq	%rdi,%r8
leaq	-1(%r8),%r11
cmpq	$6,%r11
jbe	.L426
movq	%r8,%r12
leaq	b(%rip),%r11
vpxor	%xmm1,%xmm1,%xmm1
xorl	%eax,%eax
shrq	$3,%r12
vpbroadcastd	%xmm10,%ymm8
vpbroadcastd	%xmm15,%ymm2
salq	$2,%r12
leaq	0(,%rdi,4),%rdx
leaq	a(%rip),%r10
leaq	-4(%r12),%rsi
leaq	(%r11,%rdx),%r9
addq	%r10,%rdx
shrq	$2,%rsi
addq	$1,%rsi
andl	$7,%esi
je	.L396
cmpq	$1,%rsi
je	.L545
cmpq	$2,%rsi
je	.L546
cmpq	$3,%rsi
je	.L547
cmpq	$4,%rsi
je	.L548
cmpq	$5,%rsi
je	.L549
cmpq	$6,%rsi
je	.L550
vpaddd	(%rdx),%ymm8,%ymm7
vpminsd	(%r9),%ymm7,%ymm12
movl	$4,%eax
vpsubd	%ymm2,%ymm12,%ymm11
vpmaxsd	%ymm1,%ymm11,%ymm4
vmovdqu	%ymm4,(%rdx)
.L550:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm13
vpminsd	(%r9,%rax,8),%ymm13,%ymm5
vpsubd	%ymm2,%ymm5,%ymm6
vpmaxsd	%ymm1,%ymm6,%ymm14
vmovdqu	%ymm14,(%rdx,%rax,8)
addq	$4,%rax
.L549:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm3
vpminsd	(%r9,%rax,8),%ymm3,%ymm7
vpsubd	%ymm2,%ymm7,%ymm12
vpmaxsd	%ymm1,%ymm12,%ymm11
vmovdqu	%ymm11,(%rdx,%rax,8)
addq	$4,%rax
.L548:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm4
vpminsd	(%r9,%rax,8),%ymm4,%ymm13
vpsubd	%ymm2,%ymm13,%ymm5
vpmaxsd	%ymm1,%ymm5,%ymm6
vmovdqu	%ymm6,(%rdx,%rax,8)
addq	$4,%rax
.L547:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm14
vpminsd	(%r9,%rax,8),%ymm14,%ymm3
vpsubd	%ymm2,%ymm3,%ymm7
vpmaxsd	%ymm1,%ymm7,%ymm12
vmovdqu	%ymm12,(%rdx,%rax,8)
addq	$4,%rax
.L546:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm11
vpminsd	(%r9,%rax,8),%ymm11,%ymm4
vpsubd	%ymm2,%ymm4,%ymm13
vpmaxsd	%ymm1,%ymm13,%ymm5
vmovdqu	%ymm5,(%rdx,%rax,8)
addq	$4,%rax
.L545:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm6
vpminsd	(%r9,%rax,8),%ymm6,%ymm14
vpsubd	%ymm2,%ymm14,%ymm3
vpmaxsd	%ymm1,%ymm3,%ymm7
vmovdqu	%ymm7,(%rdx,%rax,8)
addq	$4,%rax
cmpq	%r12,%rax
je	.L605
.L396:
vpaddd	(%rdx,%rax,8),%ymm8,%ymm12
vpminsd	(%r9,%rax,8),%ymm12,%ymm11
vpaddd	32(%rdx,%rax,8),%ymm8,%ymm5
vpaddd	64(%rdx,%rax,8),%ymm8,%ymm7
vpsubd	%ymm2,%ymm11,%ymm4
vpmaxsd	%ymm1,%ymm4,%ymm13
vmovdqu	%ymm13,(%rdx,%rax,8)
vpminsd	32(%r9,%rax,8),%ymm5,%ymm6
vpaddd	96(%rdx,%rax,8),%ymm8,%ymm13
vpsubd	%ymm2,%ymm6,%ymm14
vpmaxsd	%ymm1,%ymm14,%ymm3
vmovdqu	%ymm3,32(%rdx,%rax,8)
vpminsd	64(%r9,%rax,8),%ymm7,%ymm12
vpaddd	128(%rdx,%rax,8),%ymm8,%ymm3
vpsubd	%ymm2,%ymm12,%ymm11
vpmaxsd	%ymm1,%ymm11,%ymm4
vmovdqu	%ymm4,64(%rdx,%rax,8)
vpminsd	96(%r9,%rax,8),%ymm13,%ymm5
vpaddd	160(%rdx,%rax,8),%ymm8,%ymm4
vpsubd	%ymm2,%ymm5,%ymm6
vpmaxsd	%ymm1,%ymm6,%ymm14
vmovdqu	%ymm14,96(%rdx,%rax,8)
vpminsd	128(%r9,%rax,8),%ymm3,%ymm7
vpaddd	192(%rdx,%rax,8),%ymm8,%ymm14
vpsubd	%ymm2,%ymm7,%ymm12
vpmaxsd	%ymm1,%ymm12,%ymm11
vmovdqu	%ymm11,128(%rdx,%rax,8)
vpminsd	160(%r9,%rax,8),%ymm4,%ymm13
vpaddd	224(%rdx,%rax,8),%ymm8,%ymm11
vpsubd	%ymm2,%ymm13,%ymm5
vpmaxsd	%ymm1,%ymm5,%ymm6
vmovdqu	%ymm6,160(%rdx,%rax,8)
vpminsd	192(%r9,%rax,8),%ymm14,%ymm3
vpsubd	%ymm2,%ymm3,%ymm7
vpmaxsd	%ymm1,%ymm7,%ymm12
vmovdqu	%ymm12,192(%rdx,%rax,8)
vpminsd	224(%r9,%rax,8),%ymm11,%ymm4
vpsubd	%ymm2,%ymm4,%ymm13
vpmaxsd	%ymm1,%ymm13,%ymm5
vmovdqu	%ymm5,224(%rdx,%rax,8)
addq	$32,%rax
cmpq	%r12,%rax
jne	.L396
.L605:
movq	%r8,%r9
andq	$-8,%r9
addq	%r9,%rdi
testb	$7,%r8b
je	.L633
vzeroupper
.L395:
subq	%r9,%r8
leaq	-1(%r8),%rdx
cmpq	$2,%rdx
jbe	.L399
addq	%rbx,%r9
vpshufd	$0,%xmm0,%xmm0
vpshufd	$0,%xmm9,%xmm9
movq	%r8,%r12
leaq	(%r10,%r9,4),%rbx
andq	$-4,%r12
vpxor	%xmm1,%xmm1,%xmm1
vpaddd	(%rbx),%xmm0,%xmm8
vpminsd	(%r11,%r9,4),%xmm8,%xmm2
addq	%r12,%rdi
andl	$3,%r8d
vpsubd	%xmm9,%xmm2,%xmm6
vpmaxsd	%xmm1,%xmm6,%xmm14
vmovdqu	%xmm14,(%rbx)
je	.L627
.L399:
vmovd	(%r10,%rdi,4),%xmm3
vmovd	(%r11,%rdi,4),%xmm12
vpxor	%xmm13,%xmm13,%xmm13
vpaddd	%xmm3,%xmm10,%xmm7
leaq	1(%rdi),%r8
vpminsd	%xmm12,%xmm7,%xmm11
vpsubd	%xmm15,%xmm11,%xmm4
vpmaxsd	%xmm13,%xmm4,%xmm5
vmovd	%xmm5,(%r10,%rdi,4)
cmpq	%r8,%rcx
jle	.L627
vpinsrd	$0,(%r10,%r8,4),%xmm13,%xmm0
vpinsrd	$0,(%r11,%r8,4),%xmm13,%xmm2
vpxor	%xmm1,%xmm1,%xmm1
addq	$2,%rdi
vpaddd	%xmm0,%xmm10,%xmm8
vpminsd	%xmm2,%xmm8,%xmm9
vpsubd	%xmm15,%xmm9,%xmm6
vpmaxsd	%xmm1,%xmm6,%xmm14
vmovd	%xmm14,(%r10,%r8,4)
cmpq	%rdi,%rcx
jle	.L627
vpinsrd	$0,(%r10,%rdi,4),%xmm1,%xmm3
vpinsrd	$0,(%r11,%rdi,4),%xmm1,%xmm7
vpxor	%xmm11,%xmm11,%xmm11
vpaddd	%xmm3,%xmm10,%xmm10
vpminsd	%xmm7,%xmm10,%xmm12
vpsubd	%xmm15,%xmm12,%xmm15
vpmaxsd	%xmm11,%xmm15,%xmm4
vmovd	%xmm4,(%r10,%rdi,4)
jmp	.L627
.p2align 4,,10
.p2align 3
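# .L637/.L636/.L635/.L634: peeled first iterations for the unrolled loops above; each performs
# one 8-int vector step and then jumps back into its unrolled loop body.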
.L637:
vpaddd	(%rdx),%ymm13,%ymm1
vpminsd	(%r10),%ymm1,%ymm10
movl	$4,%eax
vpsubd	%ymm12,%ymm10,%ymm9
vpmaxsd	%ymm7,%ymm9,%ymm1
vpaddd	%ymm11,%ymm1,%ymm10
vpminsd	(%r10),%ymm10,%ymm9
vpsubd	%ymm8,%ymm9,%ymm1
vpmaxsd	%ymm7,%ymm1,%ymm10
vmovdqu	%ymm10,(%rdx)
jmp	.L554
.p2align 4,,10
.p2align 3
.L636:
vpaddd	(%rdx),%ymm10,%ymm4
vpminsd	(%r10),%ymm4,%ymm3
movl	$4,%eax
vpsubd	%ymm5,%ymm3,%ymm9
vpmaxsd	%ymm8,%ymm9,%ymm2
vpaddd	%ymm6,%ymm2,%ymm1
vpminsd	(%r10),%ymm1,%ymm7
vmovdqu	%ymm7,(%rdx)
jmp	.L552
.p2align 4,,10
.p2align 3
.L635:
vpaddd	(%rax),%ymm8,%ymm1
addq	$32,%rdx
addq	$32,%rax
addq	$4,%r9
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm7,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm6,%ymm1,%ymm1
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm5,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm4,%ymm1,%ymm1
vpminsd	-32(%rdx),%ymm1,%ymm1
vpsubd	%ymm3,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vmovdqu	%ymm1,-32(%rax)
jmp	.L558
.p2align 4,,10
.p2align 3
.L634:
vpaddd	(%rdx),%ymm9,%ymm1
vpminsd	(%r10),%ymm1,%ymm1
movl	$4,%eax
vpsubd	%ymm8,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm7,%ymm1,%ymm1
vpminsd	(%r10),%ymm1,%ymm1
vpsubd	%ymm6,%ymm1,%ymm1
vpmaxsd	%ymm2,%ymm1,%ymm1
vpaddd	%ymm5,%ymm1,%ymm1
vpminsd	(%r10),%ymm1,%ymm1
vmovdqu	%ymm1,(%rdx)
jmp	.L556
.L430:
xorl	%eax,%eax
leaq	b(%rip),%r11
leaq	a(%rip),%r10
jmp	.L419
.L429:
xorl	%r10d,%r10d
leaq	b(%rip),%r11
leaq	a(%rip),%r9
jmp	.L413
.L427:
xorl	%r10d,%r10d
leaq	b(%rip),%r11
leaq	a(%rip),%r9
jmp	.L401
.L426:
xorl	%r9d,%r9d
leaq	b(%rip),%r11
leaq	a(%rip),%r10
jmp	.L395
.L428:
xorl	%r10d,%r10d
leaq	b(%rip),%r11
leaq	a(%rip),%r9
jmp	.L407
.L425:
xorl	%r9d,%r9d
leaq	b(%rip),%r11
leaq	a(%rip),%r10
jmp	.L388
.L378:
leaq	.LC3(%rip),%rcx
movl	$63,%edx
leaq	.LC1(%rip),%rsi
leaq	.LC2(%rip),%rdi
call	__assert_fail@PLT
.cfi_endproc
.LFE11141:
.size	_Z2upILb0EEviiSt6vectorIiSaIiEE,.-_Z2upILb0EEviiSt6vectorIiSaIiEE
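# std::vector<int, std::allocator<int> >::~vector() (COMDAT): if the buffer pointer is non-null,
# frees it with operator delete(void*, size_t), using end_of_storage - begin as the size.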
.section	.text._ZNSt6vectorIiSaIiEED2Ev,"axG",@progbits,_ZNSt6vectorIiSaIiEED5Ev,comdat
.align 2
.p2align 4
.weak	_ZNSt6vectorIiSaIiEED2Ev
.type	_ZNSt6vectorIiSaIiEED2Ev,@function
_ZNSt6vectorIiSaIiEED2Ev:
.LFB11137:
.cfi_startproc
endbr64
movq	(%rdi),%rax
testq	%rax,%rax
je	.L640
movq	16(%rdi),%rsi
movq	%rax,%rdi
subq	%rax,%rsi
jmp	_ZdlPvm@PLT
.p2align 4,,10
.p2align 3
.L640:
ret
.cfi_endproc
.LFE11137:
.size	_ZNSt6vectorIiSaIiEED2Ev,.-_ZNSt6vectorIiSaIiEED2Ev
.weak	_ZNSt6vectorIiSaIiEED1Ev
.set	_ZNSt6vectorIiSaIiEED1Ev,_ZNSt6vectorIiSaIiEED2Ev
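# void std::vector<int>::_M_realloc_insert<int>(iterator, int&&) (COMDAT): the grow-and-insert
# slow path taken when push_back/emplace_back has no spare capacity; it roughly doubles the
# allocation, moves the old elements across, and throws length_error
# ("vector::_M_realloc_insert") if the new size would overflow max_size().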
.section	.rodata._ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_.str1.1,"aMS",@progbits,1
.LC4:
.string	"vector::_M_realloc_insert"
.section	.text._ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_,"axG",@progbits,_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_,comdat
.align 2
.p2align 4
.weak	_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_
.type	_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_,@function
_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_:
.LFB11865:
.cfi_startproc
endbr64
pushq	%r15
.cfi_def_cfa_offset 16
.cfi_offset 15,-16
movq	%rdx,%r15
movabsq	$2305843009213693951,%rdx
pushq	%r14
.cfi_def_cfa_offset 24
.cfi_offset 14,-24
pushq	%r13
.cfi_def_cfa_offset 32
.cfi_offset 13,-32
pushq	%r12
.cfi_def_cfa_offset 40
.cfi_offset 12,-40
pushq	%rbp
.cfi_def_cfa_offset 48
.cfi_offset 6,-48
pushq	%rbx
.cfi_def_cfa_offset 56
.cfi_offset 3,-56
subq	$24,%rsp
.cfi_def_cfa_offset 80
movq	8(%rdi),%r12
movq	(%rdi),%r13
movq	%r12,%rax
subq	%r13,%rax
sarq	$2,%rax
cmpq	%rdx,%rax
je	.L665
movq	%rsi,%rdx
movq	%rdi,%rbp
movq	%rsi,%r14
subq	%r13,%rdx
cmpq	%r12,%r13
je	.L666
leaq	(%rax,%rax),%rcx
cmpq	%rax,%rcx
jb	.L657
testq	%rcx,%rcx
jne	.L667
xorl	%ebx,%ebx
xorl	%ecx,%ecx
.L648:
movl	(%r15),%eax
leaq	4(%rcx,%rdx),%r8
subq	%r14,%r12
leaq	(%r8,%r12),%r15
movl	%eax,(%rcx,%rdx)
testq	%rdx,%rdx
jg	.L668
testq	%r12,%r12
jle	.L652
movq	%r12,%rdx
movq	%r14,%rsi
movq	%r8,%rdi
movq	%rcx,(%rsp)
call	memcpy@PLT
movq	(%rsp),%rcx
.L652:
testq	%r13,%r13
jne	.L651
.L654:
movq	%rcx,0(%rbp)
movq	%r15,8(%rbp)
movq	%rbx,16(%rbp)
addq	$24,%rsp
.cfi_remember_state
.cfi_def_cfa_offset 56
popq	%rbx
.cfi_def_cfa_offset 48
popq	%rbp
.cfi_def_cfa_offset 40
popq	%r12
.cfi_def_cfa_offset 32
popq	%r13
.cfi_def_cfa_offset 24
popq	%r14
.cfi_def_cfa_offset 16
popq	%r15
.cfi_def_cfa_offset 8
ret
.p2align 4,,10
.p2align 3
.L657:
.cfi_restore_state
movabsq	$9223372036854775804,%rbx
.L647:
movq	%rbx,%rdi
movq	%rdx,(%rsp)
call	_Znwm@PLT
movq	(%rsp),%rdx
movq	%rax,%rcx
addq	%rax,%rbx
jmp	.L648
.p2align 4,,10
.p2align 3
.L668:
movq	%rcx,%rdi
movq	%r13,%rsi
movq	%r8,(%rsp)
call	memmove@PLT
movq	%rax,%rcx
testq	%r12,%r12
jg	.L669
.L651:
movq	16(%rbp),%rsi
movq	%r13,%rdi
movq	%rcx,(%rsp)
subq	%r13,%rsi
call	_ZdlPvm@PLT
movq	(%rsp),%rcx
jmp	.L654
.p2align 4,,10
.p2align 3
.L666:
addq	$1,%rax
jc	.L657
movabsq	$2305843009213693951,%rcx
cmpq	%rcx,%rax
movq	%rcx,%rbx
cmovbe	%rax,%rbx
salq	$2,%rbx
jmp	.L647
.p2align 4,,10
.p2align 3
.L669:
movq	(%rsp),%rdi
movq	%r12,%rdx
movq	%r14,%rsi
movq	%rax,8(%rsp)
call	memcpy@PLT
movq	8(%rsp),%rcx
jmp	.L651
.L667:
movabsq	$2305843009213693951,%rax
cmpq	%rax,%rcx
cmova	%rax,%rcx
leaq	0(,%rcx,4),%rbx
jmp	.L647
.L665:
leaq	.LC4(%rip),%rdi
call	_ZSt20__throw_length_errorPKc@PLT
.cfi_endproc
.LFE11865:
.size	_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_,.-_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_
.section	.rodata.str1.8,"aMS",@progbits,1
.align 8
.LC7:
.string	"cannot create std::vector larger than max_size()"
.section	.text.unlikely,"ax",@progbits
.LCOLDB10:
.text
.LHOTB10:
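# distribute_candies(std::vector<int>, std::vector<int>, std::vector<int>, std::vector<int>):
# the externally visible entry point; this is the function named in the linker errors quoted
# in the compilation message at the end of this listing.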
.p2align 4
.globl	_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_
.type	_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_,@function
_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_:
.LFB10422:
.cfi_startproc
.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
.cfi_lsda 0x1b,.LLSDA10422
endbr64
pushq	%rbp
.cfi_def_cfa_offset 16
.cfi_offset 6,-16
movq	%rsp,%rbp
.cfi_def_cfa_register 6
pushq	%r15
pushq	%r14
pushq	%r13
pushq	%r12
pushq	%rbx
.cfi_offset 15,-24
.cfi_offset 14,-32
.cfi_offset 13,-40
.cfi_offset 12,-48
.cfi_offset 3,-56
movq	%rdx,%rbx
andq	$-32,%rsp
subq	$160,%rsp
movq	8(%rcx),%r11
movq	8(%rbx),%r9
movq	%rdx,72(%rsp)
movq	(%rsi),%rdx
movq	8(%rsi),%rsi
subq	(%rbx),%r9
movq	%r8,24(%rsp)
movq	%rdi,(%rsp)
movq	(%rcx),%r8
sarq	$2,%r9
movq	%rcx,64(%rsp)
subq	%rdx,%rsi
sarq	$2,%rsi
movq	%fs:40,%rax
movq	%rax,152(%rsp)
xorl	%eax,%eax
movl	%esi,16(%rsp)
cmpq	%r8,%r11
je	.L677
leaq	-4(%r11),%rcx
movq	%r8,%rax
subq	%r8,%rcx
movq	%rcx,%r10
shrq	$2,%r10
leaq	1(%r10),%r13
cmpq	$24,%rcx
jbe	.L746
movq	%r13,%r14
movl	$1,%edi
shrq	$3,%r14
vmovd	%edi,%xmm0
salq	$5,%r14
vpbroadcastd	%xmm0,%ymm2
leaq	(%r14,%r8),%r15
subq	$32,%r14
shrq	$5,%r14
addq	$1,%r14
andl	$7,%r14d
je	.L675
cmpq	$1,%r14
je	.L846
cmpq	$2,%r14
je	.L847
cmpq	$3,%r14
je	.L848
cmpq	$4,%r14
je	.L849
cmpq	$5,%r14
je	.L850
cmpq	$6,%r14
jne	.L919
.L851:
vpaddd	(%rax),%ymm2,%ymm3
addq	$32,%rax
vmovdqu	%ymm3,-32(%rax)
.L850:
vpaddd	(%rax),%ymm2,%ymm4
addq	$32,%rax
vmovdqu	%ymm4,-32(%rax)
.L849:
vpaddd	(%rax),%ymm2,%ymm5
addq	$32,%rax
vmovdqu	%ymm5,-32(%rax)
.L848:
vpaddd	(%rax),%ymm2,%ymm6
addq	$32,%rax
vmovdqu	%ymm6,-32(%rax)
.L847:
vpaddd	(%rax),%ymm2,%ymm7
addq	$32,%rax
vmovdqu	%ymm7,-32(%rax)
.L846:
vpaddd	(%rax),%ymm2,%ymm8
addq	$32,%rax
vmovdqu	%ymm8,-32(%rax)
cmpq	%r15,%rax
je	.L905
.L675:
vpaddd	(%rax),%ymm2,%ymm9
vpaddd	32(%rax),%ymm2,%ymm10
addq	$256,%rax
vpaddd	-192(%rax),%ymm2,%ymm11
vpaddd	-160(%rax),%ymm2,%ymm12
vpaddd	-128(%rax),%ymm2,%ymm13
vpaddd	-96(%rax),%ymm2,%ymm14
vmovdqu	%ymm9,-256(%rax)
vpaddd	-64(%rax),%ymm2,%ymm15
vpaddd	-32(%rax),%ymm2,%ymm0
vmovdqu	%ymm10,-224(%rax)
vmovdqu	%ymm11,-192(%rax)
vmovdqu	%ymm12,-160(%rax)
vmovdqu	%ymm13,-128(%rax)
vmovdqu	%ymm14,-96(%rax)
vmovdqu	%ymm15,-64(%rax)
vmovdqu	%ymm0,-32(%rax)
cmpq	%r15,%rax
jne	.L675
.L905:
movq	%r13,%r12
andq	$-8,%r12
andl	$7,%r13d
leaq	(%r8,%r12,4),%r14
je	.L677
.L674:
subq	%r12,%r10
leaq	1(%r10),%r13
cmpq	$2,%r10
jbe	.L679
movl	$1,%ebx
leaq	(%r8,%r12,4),%r8
movq	%r13,%rcx
vmovd	%ebx,%xmm2
andq	$-4,%rcx
andl	$3,%r13d
vpshufd	$0,%xmm2,%xmm1
vpaddd	(%r8),%xmm1,%xmm3
leaq	(%r14,%rcx,4),%r14
vmovdqu	%xmm3,(%r8)
je	.L677
.L679:
leaq	4(%r14),%r10
addl	$1,(%r14)
cmpq	%r10,%r11
je	.L677
leaq	8(%r14),%r12
addl	$1,4(%r14)
cmpq	%r12,%r11
je	.L677
addl	$1,8(%r14)
.L677:
movslq	%esi,%r11
movq	%r11,8(%rsp)
testl	%esi,%esi
jle	.L920
movslq	%esi,%r13
leaq	-1(%r13),%r8
movq	%r13,%rdi
cmpq	$2,%r8
jbe	.L747
leaq	b(%rip),%rcx
leaq	4(%rdx),%rax
xorl	%r11d,%r11d
movq	%rcx,%rbx
subq	%rax,%rbx
cmpq	$24,%rbx
ja	.L921
.L852:
movslq	%esi,%r14
andl	$7,%r14d
je	.L689
cmpq	$1,%r14
je	.L859
cmpq	$2,%r14
je	.L860
cmpq	$3,%r14
je	.L861
cmpq	$4,%r14
je	.L862
cmpq	$5,%r14
je	.L863
cmpq	$6,%r14
jne	.L922
.L864:
movl	(%rdx,%r11,4),%r15d
movl	%r15d,(%rcx,%r11,4)
addq	$1,%r11
.L863:
movl	(%rdx,%r11,4),%r13d
movl	%r13d,(%rcx,%r11,4)
addq	$1,%r11
.L862:
movl	(%rdx,%r11,4),%r8d
movl	%r8d,(%rcx,%r11,4)
addq	$1,%r11
.L861:
movl	(%rdx,%r11,4),%eax
movl	%eax,(%rcx,%r11,4)
addq	$1,%r11
.L860:
movl	(%rdx,%r11,4),%edi
movl	%edi,(%rcx,%r11,4)
addq	$1,%r11
.L859:
movl	(%rdx,%r11,4),%r12d
movl	%r12d,(%rcx,%r11,4)
addq	$1,%r11
cmpq	%r11,8(%rsp)
je	.L690
.L689:
movl	(%rdx,%r11,4),%ebx
leaq	1(%r11),%r10
leaq	2(%r11),%r15
leaq	3(%r11),%rax
leaq	4(%r11),%rdi
movl	%ebx,(%rcx,%r11,4)
movl	(%rdx,%r10,4),%r14d
leaq	5(%r11),%rbx
movl	%r14d,(%rcx,%r10,4)
movl	(%rdx,%r15,4),%r13d
leaq	6(%r11),%r14
movl	%r13d,(%rcx,%r15,4)
movl	(%rdx,%rax,4),%r8d
leaq	7(%r11),%r13
addq	$8,%r11
movl	%r8d,(%rcx,%rax,4)
movl	(%rdx,%rdi,4),%r12d
movl	%r12d,(%rcx,%rdi,4)
movl	(%rdx,%rbx,4),%r10d
movl	%r10d,(%rcx,%rbx,4)
movl	(%rdx,%r14,4),%r15d
movl	%r15d,(%rcx,%r14,4)
movl	(%rdx,%r13,4),%eax
movl	%eax,(%rcx,%r13,4)
cmpq	%r11,8(%rsp)
jne	.L689
.p2align 4,,10
.p2align 3
.L690:
movl	$1024,%r15d
movslq	%r9d,%rcx
vpxor	%xmm5,%xmm5,%xmm5
movq	$0,112(%rsp)
cmpl	%r15d,%esi
movq	%rcx,56(%rsp)
cmovle	%esi,%r15d
vmovdqa	%xmm5,96(%rsp)
testl	%r9d,%r9d
jle	.L923
movl	$1024,20(%rsp)
xorl	%r13d,%r13d
movl	$1024,32(%rsp)
vzeroupper
.L691:
movb	$0,39(%rsp)
xorl	%r12d,%r12d
xorl	%ebx,%ebx
movb	$0,38(%rsp)
jmp	.L719
.p2align 4,,10
.p2align 3
.L695:
cmpq	%r12,%r14
je	.L703
cmpb	$0,39(%rsp)
leaq	_Z2upILb1EEviiSt6vectorIiSaIiEE(%rip),%r10
leaq	_Z2upILb0EEviiSt6vectorIiSaIiEE(%rip),%rax
movabsq	$9223372036854775804,%rcx
cmove	%rax,%r10
vpxor	%xmm14,%xmm14,%xmm14
subq	%r14,%r12
movq	$0,144(%rsp)
vmovdqa	%xmm14,128(%rsp)
movq	%r10,40(%rsp)
cmpq	%r12,%rcx
jb	.L924
leaq	96(%rsp),%rsi
movq	%r12,%rdi
movq	%rsi,48(%rsp)
.LEHB0:
call	_Znwm@PLT
.LEHE0:
vmovq	%rax,%xmm15
leaq	(%rax,%r12),%r9
movq	%rax,%rdi
vpunpcklqdq	%xmm15,%xmm15,%xmm0
movq	%r9,144(%rsp)
vmovdqa	%xmm0,128(%rsp)
cmpq	$4,%r12
jle	.L709
movq	%r12,%rdx
movq	%r14,%rsi
movq	%r9,48(%rsp)
call	memmove@PLT
movq	48(%rsp),%r9
.L710:
leaq	128(%rsp),%r12
movq	40(%rsp),%r11
movl	%r15d,%esi
movl	%r13d,%edi
movq	%r9,136(%rsp)
movq	%r12,%rdx
.LEHB1:
call	*%r11
.LEHE1:
movq	128(%rsp),%rdi
testq	%rdi,%rdi
je	.L711
movq	144(%rsp),%rsi
subq	%rdi,%rsi
call	_ZdlPvm@PLT
.L711:
movq	%r14,104(%rsp)
movq	%r14,%r12
.L703:
movq	24(%rsp),%r8
movq	88(%rsp),%rdi
leaq	_Z7uprangeILb0EJiEEviiDpT0_(%rip),%rsi
leaq	_Z7uprangeILb1EJiEEviiDpT0_(%rip),%rcx
leaq	96(%rsp),%r9
movq	(%r8),%r10
movq	%r9,48(%rsp)
movl	(%r10,%rdi),%eax
movl	84(%rsp),%edi
testl	%eax,%eax
movl	%eax,%edx
cmovns	%rsi,%rcx
negl	%edx
movl	80(%rsp),%esi
cmovs	%eax,%edx
.LEHB2:
call	*%rcx
.L699:
movq	%r12,%rdx
subq	%r14,%rdx
cmpq	$24,%rdx
jne	.L694
cmpq	%r12,%r14
jne	.L925
.L694:
movq	56(%rsp),%rdi
addq	$1,%rbx
cmpq	%rdi,%rbx
je	.L926
.L719:
movq	72(%rsp),%r8
movq	64(%rsp),%rax
movl	%r13d,%r14d
movl	%r15d,%r9d
leaq	0(,%rbx,4),%r11
movq	(%r8),%rdi
movq	(%rax),%rcx
movq	%r11,88(%rsp)
movl	(%rdi,%rbx,4),%r10d
movl	(%rcx,%rbx,4),%esi
cmpl	%r13d,%r10d
cmovge	%r10d,%r14d
cmpl	%r15d,%esi
cmovle	%esi,%r9d
movl	%r14d,84(%rsp)
movl	%r9d,80(%rsp)
cmpl	%r9d,%r14d
jge	.L694
movq	96(%rsp),%r14
cmpl	%r15d,%esi
jl	.L695
cmpl	%r13d,%r10d
jg	.L695
movq	24(%rsp),%rdx
movq	88(%rsp),%r8
addq	(%rdx),%r8
vmovd	(%r8),%xmm6
vpabsd	%xmm6,%xmm7
cmpq	%r12,%r14
je	.L927
vpsrld	$31,%xmm6,%xmm8
vmovd	%xmm8,%r11d
cmpb	%r11b,38(%rsp)
jne	.L700
vmovd	-4(%r12),%xmm9
vmovdqa	.LC9(%rip),%xmm12
vpaddd	%xmm7,%xmm9,%xmm10
vinsertps	$0xe,%xmm10,%xmm10,%xmm11
vpminsd	%xmm12,%xmm11,%xmm13
vmovd	%xmm13,-4(%r12)
jmp	.L699
.p2align 4,,10
.p2align 3
.L925:
vpxor	%xmm2,%xmm2,%xmm2
cmpb	$0,39(%rsp)
movq	$0,144(%rsp)
vmovdqa	%xmm2,128(%rsp)
jne	.L715
leaq	_Z2upILb0EEviiSt6vectorIiSaIiEE(%rip),%r12
movq	%r12,88(%rsp)
.L716:
leaq	96(%rsp),%r8
movl	$24,%edi
movq	%r8,48(%rsp)
call	_Znwm@PLT
.LEHE2:
leaq	24(%rax),%r12
vmovq	%rax,%xmm1
movq	%rax,%rdi
movq	%r14,%rsi
movq	%r12,144(%rsp)
vpunpcklqdq	%xmm1,%xmm1,%xmm3
movl	$24,%edx
vmovdqa	%xmm3,128(%rsp)
call	memmove@PLT
movq	88(%rsp),%r10
movl	%r15d,%esi
movl	%r13d,%edi
movq	%r12,136(%rsp)
leaq	128(%rsp),%r12
movq	%r12,%rdx
.LEHB3:
call	*%r10
.LEHE3:
movq	128(%rsp),%rdi
testq	%rdi,%rdi
je	.L717
movq	144(%rsp),%rsi
subq	%rdi,%rsi
call	_ZdlPvm@PLT
.L717:
movq	56(%rsp),%rdi
addq	$1,%rbx
movq	%r14,104(%rsp)
movq	%r14,%r12
cmpq	%rdi,%rbx
jne	.L719
.p2align 4,,10
.p2align 3
.L926:
movq	96(%rsp),%rbx
cmpq	%rbx,%r12
je	.L729
cmpb	$0,39(%rsp)
leaq	_Z2upILb1EEviiSt6vectorIiSaIiEE(%rip),%rax
leaq	_Z2upILb0EEviiSt6vectorIiSaIiEE(%rip),%rcx
movabsq	$9223372036854775804,%rsi
cmove	%rcx,%rax
vpxor	%xmm4,%xmm4,%xmm4
subq	%rbx,%r12
movq	$0,144(%rsp)
vmovdqa	%xmm4,128(%rsp)
movq	%rax,88(%rsp)
cmpq	%r12,%rsi
jb	.L928
leaq	96(%rsp),%r9
movq	%r12,%rdi
movq	%r9,48(%rsp)
.LEHB4:
call	_Znwm@PLT
.LEHE4:
vmovq	%rax,%xmm5
leaq	(%rax,%r12),%r14
movq	%rax,%rdi
vpunpcklqdq	%xmm5,%xmm5,%xmm6
movq	%r14,144(%rsp)
vmovdqa	%xmm6,128(%rsp)
cmpq	$4,%r12
jle	.L727
movq	%r12,%rdx
movq	%rbx,%rsi
call	memmove@PLT
.L728:
leaq	128(%rsp),%r12
movl	%r13d,%edi
movl	%r15d,%esi
movq	88(%rsp),%r13
movq	%r14,136(%rsp)
movq	%r12,%rdx
.LEHB5:
call	*%r13
.LEHE5:
movq	128(%rsp),%rdi
testq	%rdi,%rdi
je	.L729
movq	144(%rsp),%rsi
subq	%rdi,%rsi
call	_ZdlPvm@PLT
.L729:
movq	112(%rsp),%rsi
subq	%rbx,%rsi
testq	%rbx,%rbx
je	.L929
movq	%rbx,%rdi
call	_ZdlPvm@PLT
movl	16(%rsp),%r10d
movl	32(%rsp),%edi
cmpl	%edi,%r10d
jle	.L693
.L914:
movl	20(%rsp),%r15d
vpxor	%xmm8,%xmm8,%xmm8
movq	$0,112(%rsp)
vmovdqa	%xmm8,96(%rsp)
movl	%r15d,%r13d
addl	$1024,%r15d
cmpl	%r15d,%r10d
movl	%r15d,20(%rsp)
movl	%r15d,32(%rsp)
cmovle	%r10d,%r15d
jmp	.L691
.p2align 4,,10
.p2align 3
.L715:
leaq	_Z2upILb1EEviiSt6vectorIiSaIiEE(%rip),%r11
movq	%r11,88(%rsp)
jmp	.L716
.p2align 4,,10
.p2align 3
.L700:
vmovd	%xmm7,128(%rsp)
cmpq	%r12,112(%rsp)
je	.L701
vmovd	%xmm7,(%r12)
addq	$4,%r12
movq	%r12,104(%rsp)
.L702:
xorb	$1,38(%rsp)
jmp	.L699
.L709:
jne	.L710
movl	(%r14),%edx
movl	%edx,(%rax)
jmp	.L710
.p2align 4,,10
.p2align 3
.L929:
movl	16(%rsp),%r10d
movl	32(%rsp),%r12d
cmpl	%r12d,%r10d
jg	.L914
.L693:
movq	(%rsp),%r13
movq	8(%rsp),%r14
vpxor	%xmm7,%xmm7,%xmm7
movq	$0,16(%r13)
salq	$2,%r14
vmovdqu	%xmm7,0(%r13)
movq	%r14,%rdi
.LEHB6:
call	_Znwm@PLT
.LEHE6:
leaq	(%rax,%r14),%r15
movq	%rax,0(%r13)
movq	%rax,%rdi
movq	%r15,16(%r13)
cmpq	$4,%r14
jle	.L930
movq	%r14,%rdx
leaq	a(%rip),%rsi
call	memcpy@PLT
.L738:
movq	(%rsp),%rcx
movq	%r15,8(%rcx)
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L902
movq	(%rsp),%rax
leaq	-40(%rbp),%rsp
popq	%rbx
popq	%r12
popq	%r13
popq	%r14
popq	%r15
popq	%rbp
.cfi_remember_state
.cfi_def_cfa 7,8
ret
.p2align 4,,10
.p2align 3
.L921:
.cfi_restore_state
cmpq	$6,%r8
jbe	.L749
shrq	$3,%r13
movq	%r13,%rbx
salq	$5,%rbx
leaq	-32(%rbx),%r10
shrq	$5,%r10
addq	$1,%r10
andl	$7,%r10d
je	.L685
cmpq	$1,%r10
je	.L853
cmpq	$2,%r10
je	.L854
cmpq	$3,%r10
je	.L855
cmpq	$4,%r10
je	.L856
cmpq	$5,%r10
je	.L857
cmpq	$6,%r10
jne	.L931
.L858:
vmovdqu	(%rdx,%r11),%ymm6
vmovdqa	%ymm6,(%rcx,%r11)
addq	$32,%r11
.L857:
vmovdqu	(%rdx,%r11),%ymm7
vmovdqa	%ymm7,(%rcx,%r11)
addq	$32,%r11
.L856:
vmovdqu	(%rdx,%r11),%ymm8
vmovdqa	%ymm8,(%rcx,%r11)
addq	$32,%r11
.L855:
vmovdqu	(%rdx,%r11),%ymm9
vmovdqa	%ymm9,(%rcx,%r11)
addq	$32,%r11
.L854:
vmovdqu	(%rdx,%r11),%ymm10
vmovdqa	%ymm10,(%rcx,%r11)
addq	$32,%r11
.L853:
vmovdqu	(%rdx,%r11),%ymm11
vmovdqa	%ymm11,(%rcx,%r11)
addq	$32,%r11
cmpq	%r11,%rbx
je	.L904
.L685:
vmovdqu	(%rdx,%r11),%ymm12
vmovdqa	%ymm12,(%rcx,%r11)
vmovdqu	32(%rdx,%r11),%ymm13
vmovdqa	%ymm13,32(%rcx,%r11)
vmovdqu	64(%rdx,%r11),%ymm14
vmovdqa	%ymm14,64(%rcx,%r11)
vmovdqu	96(%rdx,%r11),%ymm15
vmovdqa	%ymm15,96(%rcx,%r11)
vmovdqu	128(%rdx,%r11),%ymm0
vmovdqa	%ymm0,128(%rcx,%r11)
vmovdqu	160(%rdx,%r11),%ymm2
vmovdqa	%ymm2,160(%rcx,%r11)
vmovdqu	192(%rdx,%r11),%ymm1
vmovdqa	%ymm1,192(%rcx,%r11)
vmovdqu	224(%rdx,%r11),%ymm3
vmovdqa	%ymm3,224(%rcx,%r11)
addq	$256,%r11
cmpq	%r11,%rbx
jne	.L685
.L904:
movq	8(%rsp),%r15
movq	%r15,%r12
andq	$-8,%r12
movq	%r12,%r14
testb	$7,%sil
je	.L690
subq	%r12,%r15
leaq	-1(%r15),%r13
movq	%r15,%rdi
cmpq	$2,%r13
jbe	.L687
.L684:
movq	%rdi,%r8
vmovdqu	(%rdx,%r12,4),%xmm4
andq	$-4,%r8
addq	%r8,%r14
andl	$3,%edi
vmovdqa	%xmm4,(%rcx,%r12,4)
je	.L690
.L687:
movl	(%rdx,%r14,4),%edi
movq	8(%rsp),%rbx
leaq	1(%r14),%r12
leaq	0(,%r14,4),%rax
movl	%edi,(%rcx,%r14,4)
cmpq	%rbx,%r12
jge	.L690
movl	4(%rdx,%rax),%r10d
addq	$2,%r14
movl	%r10d,(%rcx,%r12,4)
cmpq	%r14,%rbx
jle	.L690
movl	8(%rdx,%rax),%edx
movl	%edx,(%rcx,%r14,4)
jmp	.L690
.p2align 4,,10
.p2align 3
.L924:
testq	%r12,%r12
jns	.L706
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L902
leaq	96(%rsp),%rbx
movq	%rbx,48(%rsp)
.LEHB7:
call	_ZSt28__throw_bad_array_new_lengthv@PLT
.p2align 4,,10
.p2align 3
.L927:
vmovd	%xmm7,128(%rsp)
cmpq	%r12,112(%rsp)
je	.L697
vmovd	%xmm7,(%r12)
addq	$4,%r12
movq	%r12,104(%rsp)
.L698:
movl	(%r8),%edi
shrl	$31,%edi
movb	%dil,38(%rsp)
movb	%dil,39(%rsp)
jmp	.L699
.p2align 4,,10
.p2align 3
.L747:
xorl	%r11d,%r11d
leaq	b(%rip),%rcx
jmp	.L852
.p2align 4,,10
.p2align 3
.L706:
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L902
leaq	96(%rsp),%r15
movq	%r15,48(%rsp)
call	_ZSt17__throw_bad_allocv@PLT
.L922:
movl	(%rdx),%r11d
movl	%r11d,(%rcx)
movl	$1,%r11d
jmp	.L864
.L919:
vpaddd	(%r8),%ymm2,%ymm1
leaq	32(%r8),%rax
vmovdqu	%ymm1,(%r8)
jmp	.L851
.L727:
jne	.L728
movl	(%rbx),%edx
movl	%edx,(%rax)
jmp	.L728
.L920:
movq	8(%rsp),%rsi
movq	(%rsp),%rdx
vpxor	%xmm4,%xmm4,%xmm4
movabsq	$9223372036854775804,%r9
salq	$2,%rsi
movq	$0,16(%rdx)
vmovdqu	%xmm4,(%rdx)
cmpq	%rsi,%r9
jb	.L932
movq	(%rsp),%r15
movq	$0,(%r15)
movq	$0,16(%r15)
xorl	%r15d,%r15d
vzeroupper
jmp	.L738
.L701:
leaq	96(%rsp),%rdi
leaq	128(%rsp),%rdx
movq	%r12,%rsi
movq	%rdi,48(%rsp)
call	_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_
movq	104(%rsp),%r12
movq	96(%rsp),%r14
jmp	.L702
.L923:
movl	16(%rsp),%r9d
movl	$1024,%esi
movl	$1024,%edx
jmp	.L734
.p2align 4,,10
.p2align 3
.L692:
addl	$1024,%esi
movl	%esi,%edx
.L734:
cmpl	%edx,%r9d
jg	.L692
vzeroupper
jmp	.L693
.L928:
testq	%r12,%r12
jns	.L724
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L902
leaq	96(%rsp),%r8
movq	%r8,48(%rsp)
call	_ZSt28__throw_bad_array_new_lengthv@PLT
.L746:
movq	%r8,%r14
xorl	%r12d,%r12d
jmp	.L674
.L697:
leaq	96(%rsp),%rdi
leaq	128(%rsp),%rdx
movq	%r12,%rsi
movq	%rdi,48(%rsp)
call	_ZNSt6vectorIiSaIiEE17_M_realloc_insertIJiEEEvN9__gnu_cxx17__normal_iteratorIPiS1_EEDpOT_
movq	24(%rsp),%r12
movq	88(%rsp),%r8
movq	96(%rsp),%r14
addq	(%r12),%r8
movq	104(%rsp),%r12
jmp	.L698
.L724:
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L902
leaq	96(%rsp),%r11
movq	%r11,48(%rsp)
call	_ZSt17__throw_bad_allocv@PLT
.LEHE7:
.L931:
vmovdqu	(%rdx),%ymm5
movl	$32,%r11d
vmovdqa	%ymm5,(%rcx)
jmp	.L858
.L749:
xorl	%r14d,%r14d
xorl	%r12d,%r12d
jmp	.L684
.L930:
movl	a(%rip),%eax
movl	%eax,(%rdi)
jmp	.L738
.L932:
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L933
leaq	.LC7(%rip),%rdi
vzeroupper
.LEHB8:
call	_ZSt20__throw_length_errorPKc@PLT
.LEHE8:
.L902:
call	__stack_chk_fail@PLT
.L933:
vzeroupper
call	__stack_chk_fail@PLT
.L758:
endbr64
movq	%rax,%rbx
jmp	.L740
.L757:
endbr64
movq	%rax,%rbx
jmp	.L732
.L756:
endbr64
movq	%rax,%rbx
jmp	.L718
.L755:
endbr64
movq	%rax,%rbx
jmp	.L713
.L754:
endbr64
movq	%rax,%rbx
vzeroupper
jmp	.L714
.globl	__gxx_personality_v0
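# LSDA call-site table for distribute_candies: pairs each .LEHB*/.LEHE* region above with its
# cleanup landing pad (.L754-.L758) so the temporary vectors are destroyed during unwinding.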
.section	.gcc_except_table,"a",@progbits
.LLSDA10422:
.byte	0xff
.byte	0xff
.byte	0x1
.uleb128 .LLSDACSE10422-.LLSDACSB10422
.LLSDACSB10422:
.uleb128 .LEHB0-.LFB10422
.uleb128 .LEHE0-.LEHB0
.uleb128 .L754-.LFB10422
.uleb128 0
.uleb128 .LEHB1-.LFB10422
.uleb128 .LEHE1-.LEHB1
.uleb128 .L755-.LFB10422
.uleb128 0
.uleb128 .LEHB2-.LFB10422
.uleb128 .LEHE2-.LEHB2
.uleb128 .L754-.LFB10422
.uleb128 0
.uleb128 .LEHB3-.LFB10422
.uleb128 .LEHE3-.LEHB3
.uleb128 .L756-.LFB10422
.uleb128 0
.uleb128 .LEHB4-.LFB10422
.uleb128 .LEHE4-.LEHB4
.uleb128 .L754-.LFB10422
.uleb128 0
.uleb128 .LEHB5-.LFB10422
.uleb128 .LEHE5-.LEHB5
.uleb128 .L757-.LFB10422
.uleb128 0
.uleb128 .LEHB6-.LFB10422
.uleb128 .LEHE6-.LEHB6
.uleb128 .L758-.LFB10422
.uleb128 0
.uleb128 .LEHB7-.LFB10422
.uleb128 .LEHE7-.LEHB7
.uleb128 .L754-.LFB10422
.uleb128 0
.uleb128 .LEHB8-.LFB10422
.uleb128 .LEHE8-.LEHB8
.uleb128 .L758-.LFB10422
.uleb128 0
.LLSDACSE10422:
.text
.cfi_endproc
.section	.text.unlikely
.cfi_startproc
.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
.cfi_lsda 0x1b,.LLSDAC10422
.type	_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_.cold,@function
_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_.cold:
.LFSB10422:
.L740:
.cfi_def_cfa 6,16
.cfi_offset 3,-56
.cfi_offset 6,-16
.cfi_offset 12,-48
.cfi_offset 13,-40
.cfi_offset 14,-32
.cfi_offset 15,-24
movq	(%rsp),%rsi
movq	(%rsi),%rdi
movq	16(%rsi),%rsi
subq	%rdi,%rsi
testq	%rdi,%rdi
je	.L912
vzeroupper
call	_ZdlPvm@PLT
jmp	.L918
.L732:
movq	%r12,%rdi
leaq	96(%rsp),%r15
vzeroupper
call	_ZNSt6vectorIiSaIiEED1Ev
movq	%r15,48(%rsp)
.L714:
movq	48(%rsp),%rdi
call	_ZNSt6vectorIiSaIiEED1Ev
.L918:
movq	152(%rsp),%rax
subq	%fs:40,%rax
jne	.L934
movq	%rbx,%rdi
.LEHB9:
call	_Unwind_Resume@PLT
.LEHE9:
.L713:
movq	%r12,%rdi
leaq	96(%rsp),%r13
vzeroupper
call	_ZNSt6vectorIiSaIiEED1Ev
movq	%r13,48(%rsp)
jmp	.L714
.L718:
movq	%r12,%rdi
leaq	96(%rsp),%r14
vzeroupper
call	_ZNSt6vectorIiSaIiEED1Ev
movq	%r14,48(%rsp)
jmp	.L714
.L912:
vzeroupper
jmp	.L918
.L934:
call	__stack_chk_fail@PLT
.cfi_endproc
.LFE10422:
.section	.gcc_except_table
.LLSDAC10422:
.byte	0xff
.byte	0xff
.byte	0x1
.uleb128 .LLSDACSEC10422-.LLSDACSBC10422
.LLSDACSBC10422:
.uleb128 .LEHB9-.LCOLDB10
.uleb128 .LEHE9-.LEHB9
.uleb128 0
.uleb128 0
.LLSDACSEC10422:
.section	.text.unlikely
.text
.size	_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_,.-_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_
.section	.text.unlikely
.size	_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_.cold,.-_Z18distribute_candiesSt6vectorIiSaIiEES1_S1_S1_.cold
.LCOLDE10:
.text
.LHOTE10:
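# Zero-initialized globals in .bss: b and a, 800040 bytes (200010 ints) each, 32-byte aligned
# so the aligned vmovdqa stores in the copy loops above are legal.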
.globl	b
.bss
.align 32
.type	b,@object
.size	b,800040
b:
.zero	800040
.globl	a
.align 32
.type	a,@object
.size	a,800040
a:
.zero	800040
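# .LC9: 16-byte constant whose first dword is 1000000010 (1e9 + 10); loaded with vmovdqa and
# used as the upper bound of a vpminsd clamp inside distribute_candies.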
.section	.rodata.cst16,"aM",@progbits,16
.align 16
.LC9:
.long	1000000010
.long	0
.long	0
.long	0
.hidden	DW.ref.__gxx_personality_v0
.weak	DW.ref.__gxx_personality_v0
.section	.data.rel.local.DW.ref.__gxx_personality_v0,"awG",@progbits,DW.ref.__gxx_personality_v0,comdat
.align 8
.type	DW.ref.__gxx_personality_v0,@object
.size	DW.ref.__gxx_personality_v0,8
DW.ref.__gxx_personality_v0:
.quad	__gxx_personality_v0
.ident	"GCC: (Gentoo 13.3.1_p20240614 p17) 13.3.1 20240614"
.section	.note.GNU-stack,"",@progbits
.section	.note.gnu.property,"a"
.align 8
.long	1f - 0f
.long	4f - 1f
.long	5
0:
.string	"GNU"
1:
.align 8
.long	0xc0000002
.long	3f - 2f
2:
.long	0x3
3:
.align 8
4:
)asm");

# Compilation message

/usr/bin/ld: /tmp/ccGJtNCx.o: in function `distribute_candies(std::vector<int, std::allocator<int> >, std::vector<int, std::allocator<int> >, std::vector<int, std::allocator<int> >, std::vector<int, std::allocator<int> >)':
(.text+0xac7): undefined reference to `std::__throw_bad_array_new_length()'
/usr/bin/ld: (.text+0xc44): undefined reference to `std::__throw_bad_array_new_length()'
collect2: error: ld returned 1 exit status