// Current File : C:/Users/ServerPC/AppData/Roaming/NVIDIA/ComputeCache/f/6/6d0233d3f099a5
// NVIDIA ComputeCache entry header (binary prefix stripped): Nov  6 2024 20:35:47, HOST64, sm_61
//
// Generated by LLVM NVPTX Back-End
//

.version 4.2
.target sm_30
.address_size 64
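
// This module contains nearest-neighbour video scaling / pixel-format
// conversion kernels for yuv420p, nv12, yuv444p, p010le, p016le and
// yuv444p16le sources, plus a declaration of a bicubic/lanczos sampler.
// The naming (Subsample_Nearest_<src>_<dst>[_uv], Subsample_Bicubic,
// lanczos_coeffs) is consistent with the CUDA kernels of FFmpeg's
// scale_cuda filter, JIT-compiled from PTX and stored in the NVIDIA
// ComputeCache; that attribution is an inference from the names, not
// stated in the file. Each .entry kernel maps one thread to one
// destination pixel, samples the source through texture objects and
// writes through global destination pointers, both passed as u64
// parameters, producing one plane (or one interleaved UV pair) per kernel.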

	// .globl	Subsample_Nearest_yuv420p_yuv420p
.func  (.param .align 16 .b8 func_retval0[4]) _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif
(
	.param .b64 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_0,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_1,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_2,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_3,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_4,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_5,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_6
)
;
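// The mangled name above appears to demangle to the internal template
// instantiation  uchar4 Subsample_Bicubic<uchar4, &lanczos_coeffs>(...),
// a bicubic/lanczos sampling helper that is declared here but not defined
// in the portion of the module shown.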

.visible .entry Subsample_Nearest_yuv420p_yuv420p(
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_0,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_1,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_2,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_3,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_4,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_5,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_6,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_param_7,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_param_8,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_param_9,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_param_10,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_param_11,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_param_12,
	.param .f32 Subsample_Nearest_yuv420p_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB0_2;
	bra.uni 	$L__BB0_1;
$L__BB0_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv420p_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB0_2:
	ret;

}
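// A minimal CUDA C sketch of what the luma kernel above appears to compute,
// reconstructed from the PTX. The function and parameter names are
// illustrative assumptions, not taken from this file:
//
//   __global__ void Subsample_Nearest_luma(cudaTextureObject_t src_tex,
//                                          unsigned char *dst,
//                                          int dst_w, int dst_h, int dst_pitch,
//                                          int src_w, int src_h)
//   {
//       int xo = blockIdx.x * blockDim.x + threadIdx.x;  // mad.lo.s32 %r1
//       int yo = blockIdx.y * blockDim.y + threadIdx.y;  // mad.lo.s32 %r2
//       if (yo < dst_h && xo < dst_w) {                  // setp/and.pred guard
//           float hscale = (float)src_w / (float)dst_w;  // div.rn.f32 %f5
//           float vscale = (float)src_h / (float)dst_h;  // div.rn.f32 %f8
//           float xi = (xo + 0.5f) * hscale;             // 0f3F000000 == 0.5f
//           float yi = (yo + 0.5f) * vscale;
//           uchar4 px = tex2D<uchar4>(src_tex, xi, yi);  // tex.2d.v4.u32.f32
//           dst[yo * dst_pitch + xo] = px.x;             // st.global.u8
//       }
//   }
//
// The other Subsample_Nearest_* entries below follow the same pattern and
// differ only in how many planes they write and how fetched samples are
// converted to the destination bit depth.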
	// .globl	Subsample_Nearest_yuv420p_yuv420p_uv
.visible .entry Subsample_Nearest_yuv420p_yuv420p_uv(
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_0,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_1,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_2,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_3,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_4,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_5,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_6,
	.param .u64 Subsample_Nearest_yuv420p_yuv420p_uv_param_7,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_uv_param_8,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_uv_param_9,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_uv_param_10,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_uv_param_11,
	.param .u32 Subsample_Nearest_yuv420p_yuv420p_uv_param_12,
	.param .f32 Subsample_Nearest_yuv420p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<14>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB1_2;
	bra.uni 	$L__BB1_1;
$L__BB1_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_yuv420p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv420p_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv420p_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv420p_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv420p_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd9, %r2, %r5;
	cvt.s64.s32 	%rd10, %r1;
	add.s64 	%rd11, %rd9, %rd10;
	add.s64 	%rd12, %rd2, %rd11;
	st.global.u8 	[%rd12], %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	add.s64 	%rd13, %rd1, %rd11;
	st.global.u8 	[%rd13], %r21;
$L__BB1_2:
	ret;

}
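// The _uv variant above is the chroma counterpart of the preceding luma
// kernel: it samples two source textures (params 1 and 2) at the same
// scaled coordinate and writes one byte each into two separate chroma
// planes (params 5 and 6), as required for a planar yuv420p destination.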
	// .globl	Subsample_Nearest_nv12_yuv420p
.visible .entry Subsample_Nearest_nv12_yuv420p(
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_0,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_1,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_2,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_3,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_4,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_5,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_6,
	.param .u64 Subsample_Nearest_nv12_yuv420p_param_7,
	.param .u32 Subsample_Nearest_nv12_yuv420p_param_8,
	.param .u32 Subsample_Nearest_nv12_yuv420p_param_9,
	.param .u32 Subsample_Nearest_nv12_yuv420p_param_10,
	.param .u32 Subsample_Nearest_nv12_yuv420p_param_11,
	.param .u32 Subsample_Nearest_nv12_yuv420p_param_12,
	.param .f32 Subsample_Nearest_nv12_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB2_2;
	bra.uni 	$L__BB2_1;
$L__BB2_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB2_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_yuv420p_uv
.visible .entry Subsample_Nearest_nv12_yuv420p_uv(
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_0,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_1,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_2,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_3,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_4,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_5,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_6,
	.param .u64 Subsample_Nearest_nv12_yuv420p_uv_param_7,
	.param .u32 Subsample_Nearest_nv12_yuv420p_uv_param_8,
	.param .u32 Subsample_Nearest_nv12_yuv420p_uv_param_9,
	.param .u32 Subsample_Nearest_nv12_yuv420p_uv_param_10,
	.param .u32 Subsample_Nearest_nv12_yuv420p_uv_param_11,
	.param .u32 Subsample_Nearest_nv12_yuv420p_uv_param_12,
	.param .f32 Subsample_Nearest_nv12_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB3_2;
	bra.uni 	$L__BB3_1;
$L__BB3_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_nv12_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_nv12_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd7, %r2, %r5;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	add.s64 	%rd10, %rd2, %rd9;
	st.global.u8 	[%rd10], %r17;
	add.s64 	%rd11, %rd1, %rd9;
	st.global.u8 	[%rd11], %r18;
$L__BB3_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_yuv420p
.visible .entry Subsample_Nearest_yuv444p_yuv420p(
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_0,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_1,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_2,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_3,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_4,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_5,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_6,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_param_7,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_param_8,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_param_9,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_param_10,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_param_11,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_param_12,
	.param .f32 Subsample_Nearest_yuv444p_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB4_2;
	bra.uni 	$L__BB4_1;
$L__BB4_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB4_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_yuv420p_uv
.visible .entry Subsample_Nearest_yuv444p_yuv420p_uv(
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p_yuv420p_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p_yuv420p_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<14>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB5_2;
	bra.uni 	$L__BB5_1;
$L__BB5_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_yuv420p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv444p_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv444p_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd9, %r2, %r5;
	cvt.s64.s32 	%rd10, %r1;
	add.s64 	%rd11, %rd9, %rd10;
	add.s64 	%rd12, %rd2, %rd11;
	st.global.u8 	[%rd12], %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	add.s64 	%rd13, %rd1, %rd11;
	st.global.u8 	[%rd13], %r21;
$L__BB5_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_yuv420p
.visible .entry Subsample_Nearest_p010le_yuv420p(
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_0,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_1,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_2,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_3,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_4,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_5,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_6,
	.param .u64 Subsample_Nearest_p010le_yuv420p_param_7,
	.param .u32 Subsample_Nearest_p010le_yuv420p_param_8,
	.param .u32 Subsample_Nearest_p010le_yuv420p_param_9,
	.param .u32 Subsample_Nearest_p010le_yuv420p_param_10,
	.param .u32 Subsample_Nearest_p010le_yuv420p_param_11,
	.param .u32 Subsample_Nearest_p010le_yuv420p_param_12,
	.param .f32 Subsample_Nearest_p010le_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB6_2;
	bra.uni 	$L__BB6_1;
$L__BB6_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB6_2:
	ret;

}
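// For the 16-bit little-endian sources (p010le, p016le, yuv444p16le) the
// kernels shift each fetched sample right by 8 bits (shr.u32 ..., 8) before
// the byte store, keeping only the high byte to reduce 16-bit samples to
// the 8-bit depth of the yuv420p/nv12 destination.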
	// .globl	Subsample_Nearest_p010le_yuv420p_uv
.visible .entry Subsample_Nearest_p010le_yuv420p_uv(
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_0,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_1,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_2,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_3,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_4,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_5,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_6,
	.param .u64 Subsample_Nearest_p010le_yuv420p_uv_param_7,
	.param .u32 Subsample_Nearest_p010le_yuv420p_uv_param_8,
	.param .u32 Subsample_Nearest_p010le_yuv420p_uv_param_9,
	.param .u32 Subsample_Nearest_p010le_yuv420p_uv_param_10,
	.param .u32 Subsample_Nearest_p010le_yuv420p_uv_param_11,
	.param .u32 Subsample_Nearest_p010le_yuv420p_uv_param_12,
	.param .f32 Subsample_Nearest_p010le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<23>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB7_2;
	bra.uni 	$L__BB7_1;
$L__BB7_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_p010le_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_p010le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd7, %r2, %r5;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	add.s64 	%rd10, %rd2, %rd9;
	st.global.u8 	[%rd10], %r21;
	shr.u32 	%r22, %r18, 8;
	add.s64 	%rd11, %rd1, %rd9;
	st.global.u8 	[%rd11], %r22;
$L__BB7_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_yuv420p
.visible .entry Subsample_Nearest_p016le_yuv420p(
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_0,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_1,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_2,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_3,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_4,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_5,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_6,
	.param .u64 Subsample_Nearest_p016le_yuv420p_param_7,
	.param .u32 Subsample_Nearest_p016le_yuv420p_param_8,
	.param .u32 Subsample_Nearest_p016le_yuv420p_param_9,
	.param .u32 Subsample_Nearest_p016le_yuv420p_param_10,
	.param .u32 Subsample_Nearest_p016le_yuv420p_param_11,
	.param .u32 Subsample_Nearest_p016le_yuv420p_param_12,
	.param .f32 Subsample_Nearest_p016le_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB8_2;
	bra.uni 	$L__BB8_1;
$L__BB8_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB8_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_yuv420p_uv
.visible .entry Subsample_Nearest_p016le_yuv420p_uv(
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_0,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_1,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_2,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_3,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_4,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_5,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_6,
	.param .u64 Subsample_Nearest_p016le_yuv420p_uv_param_7,
	.param .u32 Subsample_Nearest_p016le_yuv420p_uv_param_8,
	.param .u32 Subsample_Nearest_p016le_yuv420p_uv_param_9,
	.param .u32 Subsample_Nearest_p016le_yuv420p_uv_param_10,
	.param .u32 Subsample_Nearest_p016le_yuv420p_uv_param_11,
	.param .u32 Subsample_Nearest_p016le_yuv420p_uv_param_12,
	.param .f32 Subsample_Nearest_p016le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<23>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB9_2;
	bra.uni 	$L__BB9_1;
$L__BB9_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_p016le_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_p016le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd7, %r2, %r5;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	add.s64 	%rd10, %rd2, %rd9;
	st.global.u8 	[%rd10], %r21;
	shr.u32 	%r22, %r18, 8;
	add.s64 	%rd11, %rd1, %rd9;
	st.global.u8 	[%rd11], %r22;
$L__BB9_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_yuv420p
.visible .entry Subsample_Nearest_yuv444p16le_yuv420p(
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB10_2;
	bra.uni 	$L__BB10_1;
$L__BB10_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p16le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB10_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_yuv420p_uv
.visible .entry Subsample_Nearest_yuv444p16le_yuv420p_uv(
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<27>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<14>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB11_2;
	bra.uni 	$L__BB11_1;
$L__BB11_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p16le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r25, %r17, 8;
	mul.wide.s32 	%rd9, %r2, %r5;
	cvt.s64.s32 	%rd10, %r1;
	add.s64 	%rd11, %rd9, %rd10;
	add.s64 	%rd12, %rd2, %rd11;
	st.global.u8 	[%rd12], %r25;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r26, %r21, 8;
	add.s64 	%rd13, %rd1, %rd11;
	st.global.u8 	[%rd13], %r26;
$L__BB11_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_nv12
.visible .entry Subsample_Nearest_yuv420p_nv12(
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_0,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_1,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_2,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_3,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_4,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_5,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_6,
	.param .u64 Subsample_Nearest_yuv420p_nv12_param_7,
	.param .u32 Subsample_Nearest_yuv420p_nv12_param_8,
	.param .u32 Subsample_Nearest_yuv420p_nv12_param_9,
	.param .u32 Subsample_Nearest_yuv420p_nv12_param_10,
	.param .u32 Subsample_Nearest_yuv420p_nv12_param_11,
	.param .u32 Subsample_Nearest_yuv420p_nv12_param_12,
	.param .f32 Subsample_Nearest_yuv420p_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB12_2;
	bra.uni 	$L__BB12_1;
$L__BB12_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv420p_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB12_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_nv12_uv
.visible .entry Subsample_Nearest_yuv420p_nv12_uv(
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_0,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_1,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_2,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_3,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_4,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_5,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_6,
	.param .u64 Subsample_Nearest_yuv420p_nv12_uv_param_7,
	.param .u32 Subsample_Nearest_yuv420p_nv12_uv_param_8,
	.param .u32 Subsample_Nearest_yuv420p_nv12_uv_param_9,
	.param .u32 Subsample_Nearest_yuv420p_nv12_uv_param_10,
	.param .u32 Subsample_Nearest_yuv420p_nv12_uv_param_11,
	.param .u32 Subsample_Nearest_yuv420p_nv12_uv_param_12,
	.param .f32 Subsample_Nearest_yuv420p_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB13_2;
	bra.uni 	$L__BB13_1;
$L__BB13_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_nv12_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv420p_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv420p_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs2, %r21;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 1;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 1;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u8 	[%rd14], {%rs1, %rs2};
$L__BB13_2:
	ret;

}
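// Kernels that write an NV12 chroma plane (the *_nv12_uv variants) store U
// and V as an interleaved pair with a single st.global.v2.u8. The pitch is
// halved (shr.u64 ..., 1) to count UV pairs per row and the element index
// is then doubled (shl.b64 ..., 1) to return to a byte offset, which works
// out to dst + yo * pitch + 2 * xo for an even pitch.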
	// .globl	Subsample_Nearest_nv12_nv12
.visible .entry Subsample_Nearest_nv12_nv12(
	.param .u64 Subsample_Nearest_nv12_nv12_param_0,
	.param .u64 Subsample_Nearest_nv12_nv12_param_1,
	.param .u64 Subsample_Nearest_nv12_nv12_param_2,
	.param .u64 Subsample_Nearest_nv12_nv12_param_3,
	.param .u64 Subsample_Nearest_nv12_nv12_param_4,
	.param .u64 Subsample_Nearest_nv12_nv12_param_5,
	.param .u64 Subsample_Nearest_nv12_nv12_param_6,
	.param .u64 Subsample_Nearest_nv12_nv12_param_7,
	.param .u32 Subsample_Nearest_nv12_nv12_param_8,
	.param .u32 Subsample_Nearest_nv12_nv12_param_9,
	.param .u32 Subsample_Nearest_nv12_nv12_param_10,
	.param .u32 Subsample_Nearest_nv12_nv12_param_11,
	.param .u32 Subsample_Nearest_nv12_nv12_param_12,
	.param .f32 Subsample_Nearest_nv12_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB14_2;
	bra.uni 	$L__BB14_1;
$L__BB14_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB14_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_nv12_uv
.visible .entry Subsample_Nearest_nv12_nv12_uv(
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_0,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_1,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_2,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_3,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_4,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_5,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_6,
	.param .u64 Subsample_Nearest_nv12_nv12_uv_param_7,
	.param .u32 Subsample_Nearest_nv12_nv12_uv_param_8,
	.param .u32 Subsample_Nearest_nv12_nv12_uv_param_9,
	.param .u32 Subsample_Nearest_nv12_nv12_uv_param_10,
	.param .u32 Subsample_Nearest_nv12_nv12_uv_param_11,
	.param .u32 Subsample_Nearest_nv12_nv12_uv_param_12,
	.param .f32 Subsample_Nearest_nv12_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB15_2;
	bra.uni 	$L__BB15_1;
$L__BB15_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u8 	[%rd12], {%rs1, %rs2};
$L__BB15_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_nv12
.visible .entry Subsample_Nearest_yuv444p_nv12(
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_0,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_1,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_2,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_3,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_4,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_5,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_6,
	.param .u64 Subsample_Nearest_yuv444p_nv12_param_7,
	.param .u32 Subsample_Nearest_yuv444p_nv12_param_8,
	.param .u32 Subsample_Nearest_yuv444p_nv12_param_9,
	.param .u32 Subsample_Nearest_yuv444p_nv12_param_10,
	.param .u32 Subsample_Nearest_yuv444p_nv12_param_11,
	.param .u32 Subsample_Nearest_yuv444p_nv12_param_12,
	.param .f32 Subsample_Nearest_yuv444p_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB16_2;
	bra.uni 	$L__BB16_1;
$L__BB16_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB16_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_nv12_uv
.visible .entry Subsample_Nearest_yuv444p_nv12_uv(
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p_nv12_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p_nv12_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p_nv12_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p_nv12_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p_nv12_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p_nv12_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB17_2;
	bra.uni 	$L__BB17_1;
$L__BB17_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_nv12_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs2, %r21;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 1;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 1;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u8 	[%rd14], {%rs1, %rs2};
$L__BB17_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_nv12
.visible .entry Subsample_Nearest_p010le_nv12(
	.param .u64 Subsample_Nearest_p010le_nv12_param_0,
	.param .u64 Subsample_Nearest_p010le_nv12_param_1,
	.param .u64 Subsample_Nearest_p010le_nv12_param_2,
	.param .u64 Subsample_Nearest_p010le_nv12_param_3,
	.param .u64 Subsample_Nearest_p010le_nv12_param_4,
	.param .u64 Subsample_Nearest_p010le_nv12_param_5,
	.param .u64 Subsample_Nearest_p010le_nv12_param_6,
	.param .u64 Subsample_Nearest_p010le_nv12_param_7,
	.param .u32 Subsample_Nearest_p010le_nv12_param_8,
	.param .u32 Subsample_Nearest_p010le_nv12_param_9,
	.param .u32 Subsample_Nearest_p010le_nv12_param_10,
	.param .u32 Subsample_Nearest_p010le_nv12_param_11,
	.param .u32 Subsample_Nearest_p010le_nv12_param_12,
	.param .f32 Subsample_Nearest_p010le_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB18_2;
	bra.uni 	$L__BB18_1;
$L__BB18_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB18_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_nv12_uv
.visible .entry Subsample_Nearest_p010le_nv12_uv(
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_0,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_1,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_2,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_3,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_4,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_5,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_6,
	.param .u64 Subsample_Nearest_p010le_nv12_uv_param_7,
	.param .u32 Subsample_Nearest_p010le_nv12_uv_param_8,
	.param .u32 Subsample_Nearest_p010le_nv12_uv_param_9,
	.param .u32 Subsample_Nearest_p010le_nv12_uv_param_10,
	.param .u32 Subsample_Nearest_p010le_nv12_uv_param_11,
	.param .u32 Subsample_Nearest_p010le_nv12_uv_param_12,
	.param .f32 Subsample_Nearest_p010le_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<23>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB19_2;
	bra.uni 	$L__BB19_1;
$L__BB19_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	cvt.u16.u32 	%rs1, %r21;
	shr.u32 	%r22, %r18, 8;
	cvt.u16.u32 	%rs2, %r22;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u8 	[%rd12], {%rs1, %rs2};
$L__BB19_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_nv12
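// p016le -> nv12 luma: keep the high 8 bits of the 16-bit luma sample and store a single byte per pixel at dst[y*dst_pitch + x].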
.visible .entry Subsample_Nearest_p016le_nv12(
	.param .u64 Subsample_Nearest_p016le_nv12_param_0,
	.param .u64 Subsample_Nearest_p016le_nv12_param_1,
	.param .u64 Subsample_Nearest_p016le_nv12_param_2,
	.param .u64 Subsample_Nearest_p016le_nv12_param_3,
	.param .u64 Subsample_Nearest_p016le_nv12_param_4,
	.param .u64 Subsample_Nearest_p016le_nv12_param_5,
	.param .u64 Subsample_Nearest_p016le_nv12_param_6,
	.param .u64 Subsample_Nearest_p016le_nv12_param_7,
	.param .u32 Subsample_Nearest_p016le_nv12_param_8,
	.param .u32 Subsample_Nearest_p016le_nv12_param_9,
	.param .u32 Subsample_Nearest_p016le_nv12_param_10,
	.param .u32 Subsample_Nearest_p016le_nv12_param_11,
	.param .u32 Subsample_Nearest_p016le_nv12_param_12,
	.param .f32 Subsample_Nearest_p016le_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB20_2;
	bra.uni 	$L__BB20_1;
$L__BB20_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB20_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_nv12_uv
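// p016le -> nv12 chroma: read the interleaved 16-bit UV texel, keep the high byte of each channel, and store the pair as two interleaved bytes.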
.visible .entry Subsample_Nearest_p016le_nv12_uv(
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_0,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_1,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_2,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_3,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_4,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_5,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_6,
	.param .u64 Subsample_Nearest_p016le_nv12_uv_param_7,
	.param .u32 Subsample_Nearest_p016le_nv12_uv_param_8,
	.param .u32 Subsample_Nearest_p016le_nv12_uv_param_9,
	.param .u32 Subsample_Nearest_p016le_nv12_uv_param_10,
	.param .u32 Subsample_Nearest_p016le_nv12_uv_param_11,
	.param .u32 Subsample_Nearest_p016le_nv12_uv_param_12,
	.param .f32 Subsample_Nearest_p016le_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<23>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB21_2;
	bra.uni 	$L__BB21_1;
$L__BB21_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	cvt.u16.u32 	%rs1, %r21;
	shr.u32 	%r22, %r18, 8;
	cvt.u16.u32 	%rs2, %r22;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u8 	[%rd12], {%rs1, %rs2};
$L__BB21_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_nv12
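// yuv444p16le -> nv12 luma: same high-byte truncation of the 16-bit sample, one byte stored per output pixel.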
.visible .entry Subsample_Nearest_yuv444p16le_nv12(
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB22_2;
	bra.uni 	$L__BB22_1;
$L__BB22_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p16le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB22_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_nv12_uv
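// yuv444p16le -> nv12 chroma: U and V are fetched from two separate planar 16-bit textures (param_1 and param_2), truncated to their high bytes, and written out as an interleaved byte pair.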
.visible .entry Subsample_Nearest_yuv444p16le_nv12_uv(
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_nv12_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_nv12_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<27>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB23_2;
	bra.uni 	$L__BB23_1;
$L__BB23_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_nv12_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p16le_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p16le_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r25, %r17, 8;
	cvt.u16.u32 	%rs1, %r25;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r26, %r21, 8;
	cvt.u16.u32 	%rs2, %r26;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 1;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 1;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u8 	[%rd14], {%rs1, %rs2};
$L__BB23_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_yuv444p
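// yuv420p -> yuv444p luma: 8-bit samples pass through unchanged; the fetched byte is stored directly at dst[y*dst_pitch + x].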
.visible .entry Subsample_Nearest_yuv420p_yuv444p(
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_0,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_1,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_2,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_3,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_4,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_5,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_6,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_param_7,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_param_8,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_param_9,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_param_10,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_param_11,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_param_12,
	.param .f32 Subsample_Nearest_yuv420p_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB24_2;
	bra.uni 	$L__BB24_1;
$L__BB24_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv420p_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB24_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_yuv444p_uv
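// yuv420p -> yuv444p chroma: U and V come from two planar 8-bit textures (param_1, param_2) and are written unchanged to two planar destination planes (param_5, param_6) at the same offset.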
.visible .entry Subsample_Nearest_yuv420p_yuv444p_uv(
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_0,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_1,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_2,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_3,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_4,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_5,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_6,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p_uv_param_7,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_uv_param_8,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_uv_param_9,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_uv_param_10,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_uv_param_11,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p_uv_param_12,
	.param .f32 Subsample_Nearest_yuv420p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<14>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB25_2;
	bra.uni 	$L__BB25_1;
$L__BB25_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_yuv444p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv420p_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv420p_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv420p_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv420p_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd9, %r2, %r5;
	cvt.s64.s32 	%rd10, %r1;
	add.s64 	%rd11, %rd9, %rd10;
	add.s64 	%rd12, %rd2, %rd11;
	st.global.u8 	[%rd12], %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	add.s64 	%rd13, %rd1, %rd11;
	st.global.u8 	[%rd13], %r21;
$L__BB25_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_yuv444p
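// nv12 -> yuv444p luma: straight 8-bit copy of the nearest source sample.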
.visible .entry Subsample_Nearest_nv12_yuv444p(
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_0,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_1,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_2,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_3,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_4,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_5,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_6,
	.param .u64 Subsample_Nearest_nv12_yuv444p_param_7,
	.param .u32 Subsample_Nearest_nv12_yuv444p_param_8,
	.param .u32 Subsample_Nearest_nv12_yuv444p_param_9,
	.param .u32 Subsample_Nearest_nv12_yuv444p_param_10,
	.param .u32 Subsample_Nearest_nv12_yuv444p_param_11,
	.param .u32 Subsample_Nearest_nv12_yuv444p_param_12,
	.param .f32 Subsample_Nearest_nv12_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB26_2;
	bra.uni 	$L__BB26_1;
$L__BB26_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB26_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_yuv444p_uv
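// nv12 -> yuv444p chroma: deinterleave; a single fetch of the interleaved UV texture yields both channels, U is stored to the plane at param_5 and V to the plane at param_6.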
.visible .entry Subsample_Nearest_nv12_yuv444p_uv(
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_0,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_1,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_2,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_3,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_4,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_5,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_6,
	.param .u64 Subsample_Nearest_nv12_yuv444p_uv_param_7,
	.param .u32 Subsample_Nearest_nv12_yuv444p_uv_param_8,
	.param .u32 Subsample_Nearest_nv12_yuv444p_uv_param_9,
	.param .u32 Subsample_Nearest_nv12_yuv444p_uv_param_10,
	.param .u32 Subsample_Nearest_nv12_yuv444p_uv_param_11,
	.param .u32 Subsample_Nearest_nv12_yuv444p_uv_param_12,
	.param .f32 Subsample_Nearest_nv12_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB27_2;
	bra.uni 	$L__BB27_1;
$L__BB27_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_nv12_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_nv12_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd7, %r2, %r5;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	add.s64 	%rd10, %rd2, %rd9;
	st.global.u8 	[%rd10], %r17;
	add.s64 	%rd11, %rd1, %rd9;
	st.global.u8 	[%rd11], %r18;
$L__BB27_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_yuv444p
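// yuv444p -> yuv444p luma: 8-bit passthrough (pure nearest-neighbor resample).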
.visible .entry Subsample_Nearest_yuv444p_yuv444p(
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_0,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_1,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_2,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_3,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_4,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_5,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_6,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_param_7,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_param_8,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_param_9,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_param_10,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_param_11,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_param_12,
	.param .f32 Subsample_Nearest_yuv444p_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB28_2;
	bra.uni 	$L__BB28_1;
$L__BB28_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r17;
$L__BB28_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_yuv444p_uv
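// yuv444p -> yuv444p chroma: two planar 8-bit reads (param_1, param_2), two planar 8-bit writes (param_5, param_6).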
.visible .entry Subsample_Nearest_yuv444p_yuv444p_uv(
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<14>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB29_2;
	bra.uni 	$L__BB29_1;
$L__BB29_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_yuv444p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv444p_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv444p_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	mul.wide.s32 	%rd9, %r2, %r5;
	cvt.s64.s32 	%rd10, %r1;
	add.s64 	%rd11, %rd9, %rd10;
	add.s64 	%rd12, %rd2, %rd11;
	st.global.u8 	[%rd12], %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	add.s64 	%rd13, %rd1, %rd11;
	st.global.u8 	[%rd13], %r21;
$L__BB29_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_yuv444p
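// p010le -> yuv444p luma: keep the high byte of the 16-bit sample, store one byte per pixel.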
.visible .entry Subsample_Nearest_p010le_yuv444p(
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_0,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_1,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_2,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_3,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_4,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_5,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_6,
	.param .u64 Subsample_Nearest_p010le_yuv444p_param_7,
	.param .u32 Subsample_Nearest_p010le_yuv444p_param_8,
	.param .u32 Subsample_Nearest_p010le_yuv444p_param_9,
	.param .u32 Subsample_Nearest_p010le_yuv444p_param_10,
	.param .u32 Subsample_Nearest_p010le_yuv444p_param_11,
	.param .u32 Subsample_Nearest_p010le_yuv444p_param_12,
	.param .f32 Subsample_Nearest_p010le_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB30_2;
	bra.uni 	$L__BB30_1;
$L__BB30_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB30_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_yuv444p_uv
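// p010le -> yuv444p chroma: the interleaved 16-bit UV texel is split; the high byte of U goes to the plane at param_5 and the high byte of V to the plane at param_6.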
.visible .entry Subsample_Nearest_p010le_yuv444p_uv(
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_0,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_1,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_2,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_3,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_4,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_5,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_6,
	.param .u64 Subsample_Nearest_p010le_yuv444p_uv_param_7,
	.param .u32 Subsample_Nearest_p010le_yuv444p_uv_param_8,
	.param .u32 Subsample_Nearest_p010le_yuv444p_uv_param_9,
	.param .u32 Subsample_Nearest_p010le_yuv444p_uv_param_10,
	.param .u32 Subsample_Nearest_p010le_yuv444p_uv_param_11,
	.param .u32 Subsample_Nearest_p010le_yuv444p_uv_param_12,
	.param .f32 Subsample_Nearest_p010le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<23>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB31_2;
	bra.uni 	$L__BB31_1;
$L__BB31_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_p010le_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_p010le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd7, %r2, %r5;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	add.s64 	%rd10, %rd2, %rd9;
	st.global.u8 	[%rd10], %r21;
	shr.u32 	%r22, %r18, 8;
	add.s64 	%rd11, %rd1, %rd9;
	st.global.u8 	[%rd11], %r22;
$L__BB31_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_yuv444p
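// p016le -> yuv444p luma: high-byte truncation of the 16-bit sample, one byte per output pixel.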
.visible .entry Subsample_Nearest_p016le_yuv444p(
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_0,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_1,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_2,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_3,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_4,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_5,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_6,
	.param .u64 Subsample_Nearest_p016le_yuv444p_param_7,
	.param .u32 Subsample_Nearest_p016le_yuv444p_param_8,
	.param .u32 Subsample_Nearest_p016le_yuv444p_param_9,
	.param .u32 Subsample_Nearest_p016le_yuv444p_param_10,
	.param .u32 Subsample_Nearest_p016le_yuv444p_param_11,
	.param .u32 Subsample_Nearest_p016le_yuv444p_param_12,
	.param .f32 Subsample_Nearest_p016le_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB32_2;
	bra.uni 	$L__BB32_1;
$L__BB32_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB32_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_yuv444p_uv
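// p016le -> yuv444p chroma: interleaved 16-bit UV in, two planar 8-bit planes out (high byte of each channel).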
.visible .entry Subsample_Nearest_p016le_yuv444p_uv(
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_0,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_1,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_2,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_3,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_4,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_5,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_6,
	.param .u64 Subsample_Nearest_p016le_yuv444p_uv_param_7,
	.param .u32 Subsample_Nearest_p016le_yuv444p_uv_param_8,
	.param .u32 Subsample_Nearest_p016le_yuv444p_uv_param_9,
	.param .u32 Subsample_Nearest_p016le_yuv444p_uv_param_10,
	.param .u32 Subsample_Nearest_p016le_yuv444p_uv_param_11,
	.param .u32 Subsample_Nearest_p016le_yuv444p_uv_param_12,
	.param .f32 Subsample_Nearest_p016le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<23>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB33_2;
	bra.uni 	$L__BB33_1;
$L__BB33_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_p016le_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_p016le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd7, %r2, %r5;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	add.s64 	%rd10, %rd2, %rd9;
	st.global.u8 	[%rd10], %r21;
	shr.u32 	%r22, %r18, 8;
	add.s64 	%rd11, %rd1, %rd9;
	st.global.u8 	[%rd11], %r22;
$L__BB33_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_yuv444p
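// yuv444p16le -> yuv444p luma: high-byte truncation of the planar 16-bit sample.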
.visible .entry Subsample_Nearest_yuv444p16le_yuv444p(
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<22>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<9>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB34_2;
	bra.uni 	$L__BB34_1;
$L__BB34_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p16le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r21, %r17, 8;
	mul.wide.s32 	%rd5, %r2, %r5;
	cvt.s64.s32 	%rd6, %r1;
	add.s64 	%rd7, %rd5, %rd6;
	add.s64 	%rd8, %rd1, %rd7;
	st.global.u8 	[%rd8], %r21;
$L__BB34_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_yuv444p_uv
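// yuv444p16le -> yuv444p chroma: two planar 16-bit reads, with the high byte of each stored to the two planar 8-bit destination planes.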
.visible .entry Subsample_Nearest_yuv444p16le_yuv444p_uv(
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<27>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<14>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB35_2;
	bra.uni 	$L__BB35_1;
$L__BB35_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p16le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r25, %r17, 8;
	mul.wide.s32 	%rd9, %r2, %r5;
	cvt.s64.s32 	%rd10, %r1;
	add.s64 	%rd11, %rd9, %rd10;
	add.s64 	%rd12, %rd2, %rd11;
	st.global.u8 	[%rd12], %r25;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	shr.u32 	%r26, %r21, 8;
	add.s64 	%rd13, %rd1, %rd11;
	st.global.u8 	[%rd13], %r26;
$L__BB35_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_p010le
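// yuv420p -> p010le luma: the 8-bit sample v is widened to 16 bits as (v * 257) & 0xFFC0, i.e. replicated into both bytes and masked to an MSB-aligned 10-bit value, then stored as one u16.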
.visible .entry Subsample_Nearest_yuv420p_p010le(
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_0,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_1,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_2,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_3,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_4,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_5,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_6,
	.param .u64 Subsample_Nearest_yuv420p_p010le_param_7,
	.param .u32 Subsample_Nearest_yuv420p_p010le_param_8,
	.param .u32 Subsample_Nearest_yuv420p_p010le_param_9,
	.param .u32 Subsample_Nearest_yuv420p_p010le_param_10,
	.param .u32 Subsample_Nearest_yuv420p_p010le_param_11,
	.param .u32 Subsample_Nearest_yuv420p_p010le_param_12,
	.param .f32 Subsample_Nearest_yuv420p_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB36_2;
	bra.uni 	$L__BB36_1;
$L__BB36_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv420p_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs4;
$L__BB36_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_p010le_uv
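// yuv420p -> p010le chroma: U and V are read from two planar 8-bit textures, each widened with the same (v * 257) & 0xFFC0 expansion, and stored interleaved as a two-element u16 vector (rows addressed as dst_pitch/4 four-byte elements).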
.visible .entry Subsample_Nearest_yuv420p_p010le_uv(
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv420p_p010le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv420p_p010le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv420p_p010le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv420p_p010le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv420p_p010le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv420p_p010le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv420p_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB37_2;
	bra.uni 	$L__BB37_1;
$L__BB37_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_p010le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv420p_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv420p_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	and.b16  	%rs4, %rs3, -64;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs5, %r21;
	and.b16  	%rs6, %rs5, 255;
	mul.lo.s16 	%rs7, %rs6, 257;
	and.b16  	%rs8, %rs7, -64;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 2;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 2;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u16 	[%rd14], {%rs4, %rs8};
$L__BB37_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_p010le
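// nv12 -> p010le luma: same (v * 257) & 0xFFC0 widening of the 8-bit sample to an MSB-aligned 16-bit value.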
.visible .entry Subsample_Nearest_nv12_p010le(
	.param .u64 Subsample_Nearest_nv12_p010le_param_0,
	.param .u64 Subsample_Nearest_nv12_p010le_param_1,
	.param .u64 Subsample_Nearest_nv12_p010le_param_2,
	.param .u64 Subsample_Nearest_nv12_p010le_param_3,
	.param .u64 Subsample_Nearest_nv12_p010le_param_4,
	.param .u64 Subsample_Nearest_nv12_p010le_param_5,
	.param .u64 Subsample_Nearest_nv12_p010le_param_6,
	.param .u64 Subsample_Nearest_nv12_p010le_param_7,
	.param .u32 Subsample_Nearest_nv12_p010le_param_8,
	.param .u32 Subsample_Nearest_nv12_p010le_param_9,
	.param .u32 Subsample_Nearest_nv12_p010le_param_10,
	.param .u32 Subsample_Nearest_nv12_p010le_param_11,
	.param .u32 Subsample_Nearest_nv12_p010le_param_12,
	.param .f32 Subsample_Nearest_nv12_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB38_2;
	bra.uni 	$L__BB38_1;
$L__BB38_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs4;
$L__BB38_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_p010le_uv
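// nv12 -> p010le chroma: one fetch of the interleaved 8-bit UV texture; both channels are widened via (v * 257) & 0xFFC0 and stored as an interleaved two-element u16 vector.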
.visible .entry Subsample_Nearest_nv12_p010le_uv(
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_0,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_1,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_2,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_3,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_4,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_5,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_6,
	.param .u64 Subsample_Nearest_nv12_p010le_uv_param_7,
	.param .u32 Subsample_Nearest_nv12_p010le_uv_param_8,
	.param .u32 Subsample_Nearest_nv12_p010le_uv_param_9,
	.param .u32 Subsample_Nearest_nv12_p010le_uv_param_10,
	.param .u32 Subsample_Nearest_nv12_p010le_uv_param_11,
	.param .u32 Subsample_Nearest_nv12_p010le_uv_param_12,
	.param .f32 Subsample_Nearest_nv12_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB39_2;
	bra.uni 	$L__BB39_1;
$L__BB39_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	and.b16  	%rs3, %rs1, 255;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs4, -64;
	and.b16  	%rs6, %rs2, 255;
	mul.lo.s16 	%rs7, %rs6, 257;
	and.b16  	%rs8, %rs7, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u16 	[%rd12], {%rs5, %rs8};
$L__BB39_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_p010le
.visible .entry Subsample_Nearest_yuv444p_p010le(
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_0,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_1,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_2,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_3,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_4,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_5,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_6,
	.param .u64 Subsample_Nearest_yuv444p_p010le_param_7,
	.param .u32 Subsample_Nearest_yuv444p_p010le_param_8,
	.param .u32 Subsample_Nearest_yuv444p_p010le_param_9,
	.param .u32 Subsample_Nearest_yuv444p_p010le_param_10,
	.param .u32 Subsample_Nearest_yuv444p_p010le_param_11,
	.param .u32 Subsample_Nearest_yuv444p_p010le_param_12,
	.param .f32 Subsample_Nearest_yuv444p_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB40_2;
	bra.uni 	$L__BB40_1;
$L__BB40_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs4;
$L__BB40_2:
	ret;

}
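//
// The _uv kernel below handles the planar yuv444p chroma source: unlike the
// nv12 path it issues two tex.2d fetches, one per chroma plane (params 1
// and 2), applies the same 8 -> 10 bit expansion (& 255, * 257, & 0xFFC0)
// to each sample, and stores the pair interleaved as v2.u16 in the p010le
// chroma plane (param_5).
//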
	// .globl	Subsample_Nearest_yuv444p_p010le_uv
.visible .entry Subsample_Nearest_yuv444p_p010le_uv(
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p_p010le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p_p010le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p_p010le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p_p010le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p_p010le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p_p010le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB41_2;
	bra.uni 	$L__BB41_1;
$L__BB41_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_p010le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	and.b16  	%rs4, %rs3, -64;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs5, %r21;
	and.b16  	%rs6, %rs5, 255;
	mul.lo.s16 	%rs7, %rs6, 257;
	and.b16  	%rs8, %rs7, -64;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 2;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 2;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u16 	[%rd14], {%rs4, %rs8};
$L__BB41_2:
	ret;

}
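//
// p010le -> p010le: source and destination formats match, so the kernel
// below is a plain nearest-neighbour copy -- the low 16 bits of the fetched
// texel are stored unchanged, with no bit-depth conversion.
//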
	// .globl	Subsample_Nearest_p010le_p010le
.visible .entry Subsample_Nearest_p010le_p010le(
	.param .u64 Subsample_Nearest_p010le_p010le_param_0,
	.param .u64 Subsample_Nearest_p010le_p010le_param_1,
	.param .u64 Subsample_Nearest_p010le_p010le_param_2,
	.param .u64 Subsample_Nearest_p010le_p010le_param_3,
	.param .u64 Subsample_Nearest_p010le_p010le_param_4,
	.param .u64 Subsample_Nearest_p010le_p010le_param_5,
	.param .u64 Subsample_Nearest_p010le_p010le_param_6,
	.param .u64 Subsample_Nearest_p010le_p010le_param_7,
	.param .u32 Subsample_Nearest_p010le_p010le_param_8,
	.param .u32 Subsample_Nearest_p010le_p010le_param_9,
	.param .u32 Subsample_Nearest_p010le_p010le_param_10,
	.param .u32 Subsample_Nearest_p010le_p010le_param_11,
	.param .u32 Subsample_Nearest_p010le_p010le_param_12,
	.param .f32 Subsample_Nearest_p010le_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB42_2;
	bra.uni 	$L__BB42_1;
$L__BB42_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %r17;
$L__BB42_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_p010le_uv
.visible .entry Subsample_Nearest_p010le_p010le_uv(
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_0,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_1,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_2,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_3,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_4,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_5,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_6,
	.param .u64 Subsample_Nearest_p010le_p010le_uv_param_7,
	.param .u32 Subsample_Nearest_p010le_p010le_uv_param_8,
	.param .u32 Subsample_Nearest_p010le_p010le_uv_param_9,
	.param .u32 Subsample_Nearest_p010le_p010le_uv_param_10,
	.param .u32 Subsample_Nearest_p010le_p010le_uv_param_11,
	.param .u32 Subsample_Nearest_p010le_p010le_uv_param_12,
	.param .f32 Subsample_Nearest_p010le_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB43_2;
	bra.uni 	$L__BB43_1;
$L__BB43_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u16 	[%rd12], {%rs1, %rs2};
$L__BB43_2:
	ret;

}
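//
// p016le -> p010le: the kernel below truncates full-range 16-bit samples to
// the 10-bit MSB-aligned p010le layout by masking off the low 6 bits
// (and.b16 with -64, i.e. 0xFFC0); the _uv variant that follows does the
// same for both interleaved chroma channels.
//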
	// .globl	Subsample_Nearest_p016le_p010le
.visible .entry Subsample_Nearest_p016le_p010le(
	.param .u64 Subsample_Nearest_p016le_p010le_param_0,
	.param .u64 Subsample_Nearest_p016le_p010le_param_1,
	.param .u64 Subsample_Nearest_p016le_p010le_param_2,
	.param .u64 Subsample_Nearest_p016le_p010le_param_3,
	.param .u64 Subsample_Nearest_p016le_p010le_param_4,
	.param .u64 Subsample_Nearest_p016le_p010le_param_5,
	.param .u64 Subsample_Nearest_p016le_p010le_param_6,
	.param .u64 Subsample_Nearest_p016le_p010le_param_7,
	.param .u32 Subsample_Nearest_p016le_p010le_param_8,
	.param .u32 Subsample_Nearest_p016le_p010le_param_9,
	.param .u32 Subsample_Nearest_p016le_p010le_param_10,
	.param .u32 Subsample_Nearest_p016le_p010le_param_11,
	.param .u32 Subsample_Nearest_p016le_p010le_param_12,
	.param .f32 Subsample_Nearest_p016le_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB44_2;
	bra.uni 	$L__BB44_1;
$L__BB44_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs2;
$L__BB44_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_p010le_uv
.visible .entry Subsample_Nearest_p016le_p010le_uv(
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_0,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_1,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_2,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_3,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_4,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_5,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_6,
	.param .u64 Subsample_Nearest_p016le_p010le_uv_param_7,
	.param .u32 Subsample_Nearest_p016le_p010le_uv_param_8,
	.param .u32 Subsample_Nearest_p016le_p010le_uv_param_9,
	.param .u32 Subsample_Nearest_p016le_p010le_uv_param_10,
	.param .u32 Subsample_Nearest_p016le_p010le_uv_param_11,
	.param .u32 Subsample_Nearest_p016le_p010le_uv_param_12,
	.param .f32 Subsample_Nearest_p016le_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB45_2;
	bra.uni 	$L__BB45_1;
$L__BB45_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	and.b16  	%rs3, %rs1, -64;
	and.b16  	%rs4, %rs2, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u16 	[%rd12], {%rs3, %rs4};
$L__BB45_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_p010le
.visible .entry Subsample_Nearest_yuv444p16le_p010le(
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB46_2;
	bra.uni 	$L__BB46_1;
$L__BB46_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p16le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs2;
$L__BB46_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_p010le_uv
.visible .entry Subsample_Nearest_yuv444p16le_p010le_uv(
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_p010le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_p010le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB47_2;
	bra.uni 	$L__BB47_1;
$L__BB47_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_p010le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p16le_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p16le_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, -64;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs3, %r21;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 2;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 2;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u16 	[%rd14], {%rs2, %rs4};
$L__BB47_2:
	ret;

}
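//
// yuv420p -> p016le: the kernels from here on target full 16-bit p016le, so
// 8-bit sources are expanded with * 257 ((v << 8) | v) and, unlike the
// p010le paths above, no 0xFFC0 mask is applied.
//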
	// .globl	Subsample_Nearest_yuv420p_p016le
.visible .entry Subsample_Nearest_yuv420p_p016le(
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_0,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_1,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_2,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_3,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_4,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_5,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_6,
	.param .u64 Subsample_Nearest_yuv420p_p016le_param_7,
	.param .u32 Subsample_Nearest_yuv420p_p016le_param_8,
	.param .u32 Subsample_Nearest_yuv420p_p016le_param_9,
	.param .u32 Subsample_Nearest_yuv420p_p016le_param_10,
	.param .u32 Subsample_Nearest_yuv420p_p016le_param_11,
	.param .u32 Subsample_Nearest_yuv420p_p016le_param_12,
	.param .f32 Subsample_Nearest_yuv420p_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB48_2;
	bra.uni 	$L__BB48_1;
$L__BB48_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv420p_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB48_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_p016le_uv
.visible .entry Subsample_Nearest_yuv420p_p016le_uv(
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv420p_p016le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv420p_p016le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv420p_p016le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv420p_p016le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv420p_p016le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv420p_p016le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv420p_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB49_2;
	bra.uni 	$L__BB49_1;
$L__BB49_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_p016le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv420p_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv420p_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs4, %r21;
	and.b16  	%rs5, %rs4, 255;
	mul.lo.s16 	%rs6, %rs5, 257;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 2;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 2;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u16 	[%rd14], {%rs3, %rs6};
$L__BB49_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_p016le
.visible .entry Subsample_Nearest_nv12_p016le(
	.param .u64 Subsample_Nearest_nv12_p016le_param_0,
	.param .u64 Subsample_Nearest_nv12_p016le_param_1,
	.param .u64 Subsample_Nearest_nv12_p016le_param_2,
	.param .u64 Subsample_Nearest_nv12_p016le_param_3,
	.param .u64 Subsample_Nearest_nv12_p016le_param_4,
	.param .u64 Subsample_Nearest_nv12_p016le_param_5,
	.param .u64 Subsample_Nearest_nv12_p016le_param_6,
	.param .u64 Subsample_Nearest_nv12_p016le_param_7,
	.param .u32 Subsample_Nearest_nv12_p016le_param_8,
	.param .u32 Subsample_Nearest_nv12_p016le_param_9,
	.param .u32 Subsample_Nearest_nv12_p016le_param_10,
	.param .u32 Subsample_Nearest_nv12_p016le_param_11,
	.param .u32 Subsample_Nearest_nv12_p016le_param_12,
	.param .f32 Subsample_Nearest_nv12_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB50_2;
	bra.uni 	$L__BB50_1;
$L__BB50_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB50_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_p016le_uv
.visible .entry Subsample_Nearest_nv12_p016le_uv(
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_0,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_1,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_2,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_3,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_4,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_5,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_6,
	.param .u64 Subsample_Nearest_nv12_p016le_uv_param_7,
	.param .u32 Subsample_Nearest_nv12_p016le_uv_param_8,
	.param .u32 Subsample_Nearest_nv12_p016le_uv_param_9,
	.param .u32 Subsample_Nearest_nv12_p016le_uv_param_10,
	.param .u32 Subsample_Nearest_nv12_p016le_uv_param_11,
	.param .u32 Subsample_Nearest_nv12_p016le_uv_param_12,
	.param .f32 Subsample_Nearest_nv12_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB51_2;
	bra.uni 	$L__BB51_1;
$L__BB51_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	and.b16  	%rs3, %rs1, 255;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs2, 255;
	mul.lo.s16 	%rs6, %rs5, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u16 	[%rd12], {%rs4, %rs6};
$L__BB51_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_p016le
.visible .entry Subsample_Nearest_yuv444p_p016le(
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_0,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_1,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_2,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_3,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_4,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_5,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_6,
	.param .u64 Subsample_Nearest_yuv444p_p016le_param_7,
	.param .u32 Subsample_Nearest_yuv444p_p016le_param_8,
	.param .u32 Subsample_Nearest_yuv444p_p016le_param_9,
	.param .u32 Subsample_Nearest_yuv444p_p016le_param_10,
	.param .u32 Subsample_Nearest_yuv444p_p016le_param_11,
	.param .u32 Subsample_Nearest_yuv444p_p016le_param_12,
	.param .f32 Subsample_Nearest_yuv444p_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB52_2;
	bra.uni 	$L__BB52_1;
$L__BB52_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB52_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_p016le_uv
.visible .entry Subsample_Nearest_yuv444p_p016le_uv(
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p_p016le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p_p016le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p_p016le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p_p016le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p_p016le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p_p016le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB53_2;
	bra.uni 	$L__BB53_1;
$L__BB53_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_p016le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs4, %r21;
	and.b16  	%rs5, %rs4, 255;
	mul.lo.s16 	%rs6, %rs5, 257;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 2;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 2;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u16 	[%rd14], {%rs3, %rs6};
$L__BB53_2:
	ret;

}
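//
// p010le -> p016le: the kernel below widens 10-bit MSB-aligned samples to
// the full 16-bit range by OR-ing the value with itself shifted right by 10
// (shr.u16 / or.b16), replicating the top bits into the low bits.
//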
	// .globl	Subsample_Nearest_p010le_p016le
.visible .entry Subsample_Nearest_p010le_p016le(
	.param .u64 Subsample_Nearest_p010le_p016le_param_0,
	.param .u64 Subsample_Nearest_p010le_p016le_param_1,
	.param .u64 Subsample_Nearest_p010le_p016le_param_2,
	.param .u64 Subsample_Nearest_p010le_p016le_param_3,
	.param .u64 Subsample_Nearest_p010le_p016le_param_4,
	.param .u64 Subsample_Nearest_p010le_p016le_param_5,
	.param .u64 Subsample_Nearest_p010le_p016le_param_6,
	.param .u64 Subsample_Nearest_p010le_p016le_param_7,
	.param .u32 Subsample_Nearest_p010le_p016le_param_8,
	.param .u32 Subsample_Nearest_p010le_p016le_param_9,
	.param .u32 Subsample_Nearest_p010le_p016le_param_10,
	.param .u32 Subsample_Nearest_p010le_p016le_param_11,
	.param .u32 Subsample_Nearest_p010le_p016le_param_12,
	.param .f32 Subsample_Nearest_p010le_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB54_2;
	bra.uni 	$L__BB54_1;
$L__BB54_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB54_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_p016le_uv
.visible .entry Subsample_Nearest_p010le_p016le_uv(
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_0,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_1,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_2,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_3,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_4,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_5,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_6,
	.param .u64 Subsample_Nearest_p010le_p016le_uv_param_7,
	.param .u32 Subsample_Nearest_p010le_p016le_uv_param_8,
	.param .u32 Subsample_Nearest_p010le_p016le_uv_param_9,
	.param .u32 Subsample_Nearest_p010le_p016le_uv_param_10,
	.param .u32 Subsample_Nearest_p010le_p016le_uv_param_11,
	.param .u32 Subsample_Nearest_p010le_p016le_uv_param_12,
	.param .f32 Subsample_Nearest_p010le_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB55_2;
	bra.uni 	$L__BB55_1;
$L__BB55_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u16 	[%rd12], {%rs4, %rs6};
$L__BB55_2:
	ret;

}
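//
// p016le -> p016le: same format on both sides, so the kernel below stores
// the fetched 16-bit sample unchanged; its _uv counterpart copies the
// interleaved chroma pair the same way.
//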
	// .globl	Subsample_Nearest_p016le_p016le
.visible .entry Subsample_Nearest_p016le_p016le(
	.param .u64 Subsample_Nearest_p016le_p016le_param_0,
	.param .u64 Subsample_Nearest_p016le_p016le_param_1,
	.param .u64 Subsample_Nearest_p016le_p016le_param_2,
	.param .u64 Subsample_Nearest_p016le_p016le_param_3,
	.param .u64 Subsample_Nearest_p016le_p016le_param_4,
	.param .u64 Subsample_Nearest_p016le_p016le_param_5,
	.param .u64 Subsample_Nearest_p016le_p016le_param_6,
	.param .u64 Subsample_Nearest_p016le_p016le_param_7,
	.param .u32 Subsample_Nearest_p016le_p016le_param_8,
	.param .u32 Subsample_Nearest_p016le_p016le_param_9,
	.param .u32 Subsample_Nearest_p016le_p016le_param_10,
	.param .u32 Subsample_Nearest_p016le_p016le_param_11,
	.param .u32 Subsample_Nearest_p016le_p016le_param_12,
	.param .f32 Subsample_Nearest_p016le_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB56_2;
	bra.uni 	$L__BB56_1;
$L__BB56_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %r17;
$L__BB56_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_p016le_uv
.visible .entry Subsample_Nearest_p016le_p016le_uv(
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_0,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_1,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_2,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_3,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_4,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_5,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_6,
	.param .u64 Subsample_Nearest_p016le_p016le_uv_param_7,
	.param .u32 Subsample_Nearest_p016le_p016le_uv_param_8,
	.param .u32 Subsample_Nearest_p016le_p016le_uv_param_9,
	.param .u32 Subsample_Nearest_p016le_p016le_uv_param_10,
	.param .u32 Subsample_Nearest_p016le_p016le_uv_param_11,
	.param .u32 Subsample_Nearest_p016le_p016le_uv_param_12,
	.param .f32 Subsample_Nearest_p016le_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB57_2;
	bra.uni 	$L__BB57_1;
$L__BB57_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v2.u16 	[%rd12], {%rs1, %rs2};
$L__BB57_2:
	ret;

}
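//
// yuv444p16le -> p016le: 16-bit planar source copied to 16-bit output with
// no conversion; the _uv variant below reads the two chroma planes with
// separate tex.2d fetches (params 1 and 2) and interleaves them on store.
//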
	// .globl	Subsample_Nearest_yuv444p16le_p016le
.visible .entry Subsample_Nearest_yuv444p16le_p016le(
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB58_2;
	bra.uni 	$L__BB58_1;
$L__BB58_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p16le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %r17;
$L__BB58_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_p016le_uv
.visible .entry Subsample_Nearest_yuv444p16le_p016le_uv(
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_p016le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_p016le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<15>;
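	// *_uv variant for a p016le destination: U and V are sampled from two
	// textures (params 1 and 2) at the same coordinate and written as one
	// interleaved {U, V} pair with st.global.v2.u16, i.e. a 4-byte element
	// per pixel (hence the byte pitch in param_10 is shifted right by 2 below).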

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB59_2;
	bra.uni 	$L__BB59_1;
$L__BB59_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_p016le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p16le_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p16le_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs2, %r21;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 2;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 2;
	add.s64 	%rd14, %rd1, %rd13;
	st.global.v2.u16 	[%rd14], {%rs1, %rs2};
$L__BB59_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_yuv444p16le
.visible .entry Subsample_Nearest_yuv420p_yuv444p16le(
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_0,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_1,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_2,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_3,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_4,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_5,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_6,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_param_7,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_param_8,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_param_9,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_param_10,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_param_11,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_param_12,
	.param .f32 Subsample_Nearest_yuv420p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB60_2;
	bra.uni 	$L__BB60_1;
$L__BB60_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv420p_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv420p_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
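	// 8-bit source, 16-bit destination: (sample & 0xff) * 257 == sample * 0x0101,
	// which replicates the byte into both halves and maps 0..255 onto 0..65535.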
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB60_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv420p_yuv444p16le_uv
.visible .entry Subsample_Nearest_yuv420p_yuv444p16le_uv(
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv420p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<18>;
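	// *_uv variant for a planar yuv444p16le destination: U and V each get their
	// own output plane (params 5 and 6); both samples are fetched, widened to
	// 16 bits, and stored at the same (x, y) offset within their planes.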

	ld.param.u32 	%r4, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB61_2;
	bra.uni 	$L__BB61_1;
$L__BB61_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv420p_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd9, %r2;
	cvt.s64.s32 	%rd10, %r5;
	shr.u64 	%rd11, %rd10, 1;
	mul.lo.s64 	%rd12, %rd11, %rd9;
	cvt.s64.s32 	%rd13, %r1;
	add.s64 	%rd14, %rd12, %rd13;
	shl.b64 	%rd15, %rd14, 1;
	add.s64 	%rd16, %rd2, %rd15;
	st.global.u16 	[%rd16], %rs3;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs4, %r21;
	and.b16  	%rs5, %rs4, 255;
	mul.lo.s16 	%rs6, %rs5, 257;
	add.s64 	%rd17, %rd1, %rd15;
	st.global.u16 	[%rd17], %rs6;
$L__BB61_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_yuv444p16le
.visible .entry Subsample_Nearest_nv12_yuv444p16le(
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_0,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_1,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_2,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_3,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_4,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_5,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_6,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_param_7,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_param_8,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_param_9,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_param_10,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_param_11,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_param_12,
	.param .f32 Subsample_Nearest_nv12_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB62_2;
	bra.uni 	$L__BB62_1;
$L__BB62_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_nv12_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB62_2:
	ret;

}
	// .globl	Subsample_Nearest_nv12_yuv444p16le_uv
.visible .entry Subsample_Nearest_nv12_yuv444p16le_uv(
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Nearest_nv12_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Nearest_nv12_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Nearest_nv12_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<16>;
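	// nv12 chroma is interleaved, so a single fetch from the texture in param_1
	// returns both U (.x -> %r17) and V (.y -> %r18); each is widened to 16 bits
	// and written to its own destination plane (params 5 and 6).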

	ld.param.u32 	%r4, [Subsample_Nearest_nv12_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_nv12_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB63_2;
	bra.uni 	$L__BB63_1;
$L__BB63_1:
	ld.param.u32 	%r7, [Subsample_Nearest_nv12_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_nv12_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_nv12_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_nv12_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_nv12_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_nv12_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	and.b16  	%rs3, %rs1, 255;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 1;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 1;
	add.s64 	%rd14, %rd2, %rd13;
	st.global.u16 	[%rd14], %rs4;
	and.b16  	%rs5, %rs2, 255;
	mul.lo.s16 	%rs6, %rs5, 257;
	add.s64 	%rd15, %rd1, %rd13;
	st.global.u16 	[%rd15], %rs6;
$L__BB63_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_yuv444p16le
.visible .entry Subsample_Nearest_yuv444p_yuv444p16le(
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_0,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_1,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_2,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_3,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_4,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_5,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_6,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_param_7,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_param_8,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_param_9,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_param_10,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_param_11,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_param_12,
	.param .f32 Subsample_Nearest_yuv444p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB64_2;
	bra.uni 	$L__BB64_1;
$L__BB64_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB64_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p_yuv444p16le_uv
.visible .entry Subsample_Nearest_yuv444p_yuv444p16le_uv(
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<18>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB65_2;
	bra.uni 	$L__BB65_1;
$L__BB65_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	and.b16  	%rs2, %rs1, 255;
	mul.lo.s16 	%rs3, %rs2, 257;
	cvt.s64.s32 	%rd9, %r2;
	cvt.s64.s32 	%rd10, %r5;
	shr.u64 	%rd11, %rd10, 1;
	mul.lo.s64 	%rd12, %rd11, %rd9;
	cvt.s64.s32 	%rd13, %r1;
	add.s64 	%rd14, %rd12, %rd13;
	shl.b64 	%rd15, %rd14, 1;
	add.s64 	%rd16, %rd2, %rd15;
	st.global.u16 	[%rd16], %rs3;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs4, %r21;
	and.b16  	%rs5, %rs4, 255;
	mul.lo.s16 	%rs6, %rs5, 257;
	add.s64 	%rd17, %rd1, %rd15;
	st.global.u16 	[%rd17], %rs6;
$L__BB65_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_yuv444p16le
.visible .entry Subsample_Nearest_p010le_yuv444p16le(
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_0,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_1,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_2,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_3,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_4,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_5,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_6,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_param_7,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_param_8,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_param_9,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_param_10,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_param_11,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_param_12,
	.param .f32 Subsample_Nearest_p010le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB66_2;
	bra.uni 	$L__BB66_1;
$L__BB66_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p010le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
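	// p010le keeps its 10 significant bits in the top of each 16-bit sample;
	// OR-ing in (sample >> 10) copies the top bits into the low bits so the
	// result spans the full 16-bit range of the yuv444p16le destination.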
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %rs3;
$L__BB66_2:
	ret;

}
	// .globl	Subsample_Nearest_p010le_yuv444p16le_uv
.visible .entry Subsample_Nearest_p010le_yuv444p16le_uv(
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Nearest_p010le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Nearest_p010le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Nearest_p010le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Nearest_p010le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p010le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB67_2;
	bra.uni 	$L__BB67_1;
$L__BB67_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p010le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p010le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p010le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_p010le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_p010le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_p010le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 1;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 1;
	add.s64 	%rd14, %rd2, %rd13;
	st.global.u16 	[%rd14], %rs4;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	add.s64 	%rd15, %rd1, %rd13;
	st.global.u16 	[%rd15], %rs6;
$L__BB67_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_yuv444p16le
.visible .entry Subsample_Nearest_p016le_yuv444p16le(
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_0,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_1,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_2,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_3,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_4,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_5,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_6,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_param_7,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_param_8,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_param_9,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_param_10,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_param_11,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_param_12,
	.param .f32 Subsample_Nearest_p016le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB68_2;
	bra.uni 	$L__BB68_1;
$L__BB68_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_p016le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %r17;
$L__BB68_2:
	ret;

}
	// .globl	Subsample_Nearest_p016le_yuv444p16le_uv
.visible .entry Subsample_Nearest_p016le_yuv444p16le_uv(
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Nearest_p016le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Nearest_p016le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Nearest_p016le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Nearest_p016le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_p016le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB69_2;
	bra.uni 	$L__BB69_1;
$L__BB69_1:
	ld.param.u32 	%r7, [Subsample_Nearest_p016le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_p016le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_p016le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Nearest_p016le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Nearest_p016le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Nearest_p016le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd7, %r2;
	cvt.s64.s32 	%rd8, %r5;
	shr.u64 	%rd9, %rd8, 1;
	mul.lo.s64 	%rd10, %rd9, %rd7;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	shl.b64 	%rd13, %rd12, 1;
	add.s64 	%rd14, %rd2, %rd13;
	st.global.u16 	[%rd14], %r17;
	add.s64 	%rd15, %rd1, %rd13;
	st.global.u16 	[%rd15], %r18;
$L__BB69_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_yuv444p16le
.visible .entry Subsample_Nearest_yuv444p16le_yuv444p16le(
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB70_2;
	bra.uni 	$L__BB70_1;
$L__BB70_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_yuv444p16le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 1;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 1;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.u16 	[%rd12], %r17;
$L__BB70_2:
	ret;

}
	// .globl	Subsample_Nearest_yuv444p16le_yuv444p16le_uv
.visible .entry Subsample_Nearest_yuv444p16le_yuv444p16le_uv(
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<25>;
	.reg .f32 	%f<15>;
	.reg .b64 	%rd<18>;

	ld.param.u32 	%r4, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB71_2;
	bra.uni 	$L__BB71_1;
$L__BB71_1:
	ld.param.u32 	%r7, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Nearest_yuv444p16le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f5, %r6;
	cvt.rn.f32.s32 	%f6, %r3;
	div.rn.f32 	%f7, %f5, %f6;
	cvt.rn.f32.s32 	%f8, %r7;
	cvt.rn.f32.s32 	%f9, %r4;
	div.rn.f32 	%f10, %f8, %f9;
	cvt.rn.f32.s32 	%f11, %r1;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f1, %f7, %f12;
	cvt.rn.f32.s32 	%f13, %r2;
	add.f32 	%f14, %f13, 0f3F000000;
	mul.f32 	%f2, %f10, %f14;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f1, %f2}];
	// end inline asm
	cvt.s64.s32 	%rd9, %r2;
	cvt.s64.s32 	%rd10, %r5;
	shr.u64 	%rd11, %rd10, 1;
	mul.lo.s64 	%rd12, %rd11, %rd9;
	cvt.s64.s32 	%rd13, %r1;
	add.s64 	%rd14, %rd12, %rd13;
	shl.b64 	%rd15, %rd14, 1;
	add.s64 	%rd16, %rd2, %rd15;
	st.global.u16 	[%rd16], %r17;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f1, %f2}];
	// end inline asm
	add.s64 	%rd17, %rd1, %rd15;
	st.global.u16 	[%rd17], %r21;
$L__BB71_2:
	ret;

}
	// .globl	Subsample_Nearest_bgr0_bgr0
.visible .entry Subsample_Nearest_bgr0_bgr0(
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_0,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_1,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_2,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_3,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_4,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_5,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_6,
	.param .u64 Subsample_Nearest_bgr0_bgr0_param_7,
	.param .u32 Subsample_Nearest_bgr0_bgr0_param_8,
	.param .u32 Subsample_Nearest_bgr0_bgr0_param_9,
	.param .u32 Subsample_Nearest_bgr0_bgr0_param_10,
	.param .u32 Subsample_Nearest_bgr0_bgr0_param_11,
	.param .u32 Subsample_Nearest_bgr0_bgr0_param_12,
	.param .f32 Subsample_Nearest_bgr0_bgr0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_bgr0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_bgr0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB72_2;
	bra.uni 	$L__BB72_1;
$L__BB72_1:
	ld.param.u32 	%r7, [Subsample_Nearest_bgr0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_bgr0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_bgr0_bgr0_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_bgr0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_bgr0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.u16.u32 	%rs3, %r19;
	cvt.u16.u32 	%rs4, %r20;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v4.u8 	[%rd12], {%rs1, %rs2, %rs3, %rs4};
$L__BB72_2:
	ret;

}
	// .globl	Subsample_Nearest_bgr0_bgr0_uv
.visible .entry Subsample_Nearest_bgr0_bgr0_uv(
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_0,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_1,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_2,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_3,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_4,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_5,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_6,
	.param .u64 Subsample_Nearest_bgr0_bgr0_uv_param_7,
	.param .u32 Subsample_Nearest_bgr0_bgr0_uv_param_8,
	.param .u32 Subsample_Nearest_bgr0_bgr0_uv_param_9,
	.param .u32 Subsample_Nearest_bgr0_bgr0_uv_param_10,
	.param .u32 Subsample_Nearest_bgr0_bgr0_uv_param_11,
	.param .u32 Subsample_Nearest_bgr0_bgr0_uv_param_12,
	.param .f32 Subsample_Nearest_bgr0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;
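	// Packed RGB formats have no separate chroma plane, so this *_uv variant is
	// an empty kernel that returns immediately.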

	ret;

}
	// .globl	Subsample_Nearest_rgb0_rgb0
.visible .entry Subsample_Nearest_rgb0_rgb0(
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_0,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_1,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_2,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_3,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_4,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_5,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_6,
	.param .u64 Subsample_Nearest_rgb0_rgb0_param_7,
	.param .u32 Subsample_Nearest_rgb0_rgb0_param_8,
	.param .u32 Subsample_Nearest_rgb0_rgb0_param_9,
	.param .u32 Subsample_Nearest_rgb0_rgb0_param_10,
	.param .u32 Subsample_Nearest_rgb0_rgb0_param_11,
	.param .u32 Subsample_Nearest_rgb0_rgb0_param_12,
	.param .f32 Subsample_Nearest_rgb0_rgb0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_rgb0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_rgb0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB74_2;
	bra.uni 	$L__BB74_1;
$L__BB74_1:
	ld.param.u32 	%r7, [Subsample_Nearest_rgb0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_rgb0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_rgb0_rgb0_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_rgb0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_rgb0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.u16.u32 	%rs3, %r19;
	cvt.u16.u32 	%rs4, %r20;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v4.u8 	[%rd12], {%rs1, %rs2, %rs3, %rs4};
$L__BB74_2:
	ret;

}
	// .globl	Subsample_Nearest_rgb0_rgb0_uv
.visible .entry Subsample_Nearest_rgb0_rgb0_uv(
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_0,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_1,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_2,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_3,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_4,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_5,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_6,
	.param .u64 Subsample_Nearest_rgb0_rgb0_uv_param_7,
	.param .u32 Subsample_Nearest_rgb0_rgb0_uv_param_8,
	.param .u32 Subsample_Nearest_rgb0_rgb0_uv_param_9,
	.param .u32 Subsample_Nearest_rgb0_rgb0_uv_param_10,
	.param .u32 Subsample_Nearest_rgb0_rgb0_uv_param_11,
	.param .u32 Subsample_Nearest_rgb0_rgb0_uv_param_12,
	.param .f32 Subsample_Nearest_rgb0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Nearest_bgr0_rgb0
.visible .entry Subsample_Nearest_bgr0_rgb0(
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_0,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_1,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_2,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_3,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_4,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_5,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_6,
	.param .u64 Subsample_Nearest_bgr0_rgb0_param_7,
	.param .u32 Subsample_Nearest_bgr0_rgb0_param_8,
	.param .u32 Subsample_Nearest_bgr0_rgb0_param_9,
	.param .u32 Subsample_Nearest_bgr0_rgb0_param_10,
	.param .u32 Subsample_Nearest_bgr0_rgb0_param_11,
	.param .u32 Subsample_Nearest_bgr0_rgb0_param_12,
	.param .f32 Subsample_Nearest_bgr0_rgb0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_bgr0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_bgr0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB76_2;
	bra.uni 	$L__BB76_1;
$L__BB76_1:
	ld.param.u32 	%r7, [Subsample_Nearest_bgr0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_bgr0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_bgr0_rgb0_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_bgr0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_bgr0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.u16.u32 	%rs3, %r19;
	cvt.u16.u32 	%rs4, %r20;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
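	// bgr0 -> rgb0: the store below writes {%rs3, %rs2, %rs1, %rs4}, swapping
	// the first and third bytes of the texel while leaving the fourth in place.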
	st.global.v4.u8 	[%rd12], {%rs3, %rs2, %rs1, %rs4};
$L__BB76_2:
	ret;

}
	// .globl	Subsample_Nearest_bgr0_rgb0_uv
.visible .entry Subsample_Nearest_bgr0_rgb0_uv(
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_0,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_1,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_2,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_3,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_4,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_5,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_6,
	.param .u64 Subsample_Nearest_bgr0_rgb0_uv_param_7,
	.param .u32 Subsample_Nearest_bgr0_rgb0_uv_param_8,
	.param .u32 Subsample_Nearest_bgr0_rgb0_uv_param_9,
	.param .u32 Subsample_Nearest_bgr0_rgb0_uv_param_10,
	.param .u32 Subsample_Nearest_bgr0_rgb0_uv_param_11,
	.param .u32 Subsample_Nearest_bgr0_rgb0_uv_param_12,
	.param .f32 Subsample_Nearest_bgr0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Nearest_rgb0_bgr0
.visible .entry Subsample_Nearest_rgb0_bgr0(
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_0,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_1,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_2,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_3,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_4,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_5,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_6,
	.param .u64 Subsample_Nearest_rgb0_bgr0_param_7,
	.param .u32 Subsample_Nearest_rgb0_bgr0_param_8,
	.param .u32 Subsample_Nearest_rgb0_bgr0_param_9,
	.param .u32 Subsample_Nearest_rgb0_bgr0_param_10,
	.param .u32 Subsample_Nearest_rgb0_bgr0_param_11,
	.param .u32 Subsample_Nearest_rgb0_bgr0_param_12,
	.param .f32 Subsample_Nearest_rgb0_bgr0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<21>;
	.reg .f32 	%f<13>;
	.reg .b64 	%rd<13>;

	ld.param.u32 	%r4, [Subsample_Nearest_rgb0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Nearest_rgb0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB78_2;
	bra.uni 	$L__BB78_1;
$L__BB78_1:
	ld.param.u32 	%r7, [Subsample_Nearest_rgb0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Nearest_rgb0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Nearest_rgb0_bgr0_param_10];
	ld.param.u64 	%rd4, [Subsample_Nearest_rgb0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Nearest_rgb0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f3, %r6;
	cvt.rn.f32.s32 	%f4, %r3;
	div.rn.f32 	%f5, %f3, %f4;
	cvt.rn.f32.s32 	%f6, %r7;
	cvt.rn.f32.s32 	%f7, %r4;
	div.rn.f32 	%f8, %f6, %f7;
	cvt.rn.f32.s32 	%f9, %r1;
	add.f32 	%f10, %f9, 0f3F000000;
	mul.f32 	%f1, %f5, %f10;
	cvt.rn.f32.s32 	%f11, %r2;
	add.f32 	%f12, %f11, 0f3F000000;
	mul.f32 	%f2, %f8, %f12;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f1, %f2}];
	// end inline asm
	cvt.u16.u32 	%rs1, %r17;
	cvt.u16.u32 	%rs2, %r18;
	cvt.u16.u32 	%rs3, %r19;
	cvt.u16.u32 	%rs4, %r20;
	cvt.s64.s32 	%rd5, %r2;
	cvt.s64.s32 	%rd6, %r5;
	shr.u64 	%rd7, %rd6, 2;
	mul.lo.s64 	%rd8, %rd7, %rd5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	shl.b64 	%rd11, %rd10, 2;
	add.s64 	%rd12, %rd1, %rd11;
	st.global.v4.u8 	[%rd12], {%rs3, %rs2, %rs1, %rs4};
$L__BB78_2:
	ret;

}
	// .globl	Subsample_Nearest_rgb0_bgr0_uv
.visible .entry Subsample_Nearest_rgb0_bgr0_uv(
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_0,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_1,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_2,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_3,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_4,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_5,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_6,
	.param .u64 Subsample_Nearest_rgb0_bgr0_uv_param_7,
	.param .u32 Subsample_Nearest_rgb0_bgr0_uv_param_8,
	.param .u32 Subsample_Nearest_rgb0_bgr0_uv_param_9,
	.param .u32 Subsample_Nearest_rgb0_bgr0_uv_param_10,
	.param .u32 Subsample_Nearest_rgb0_bgr0_uv_param_11,
	.param .u32 Subsample_Nearest_rgb0_bgr0_uv_param_12,
	.param .f32 Subsample_Nearest_rgb0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_yuv420p
.visible .entry Subsample_Bilinear_yuv420p_yuv420p(
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;
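	// Subsample_Bilinear: per axis, c = clamp((scale - 1) * 0.5, 0.0, 1.0) and
	// d = c / (c + 0.5); the kernel fetches four texels at
	// (scale * (x + 0.5) +/- dx, scale * (y + 0.5) +/- dy), masks each to its
	// low 8 bits, and stores (sum + 2) >> 2 (a rounded four-sample average)
	// as a single byte at dst + y * param_10 + x.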

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB80_2;
	bra.uni 	$L__BB80_1;
$L__BB80_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv420p_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
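	// %f11 = src_width/dst_width and %f14 = src_height/dst_height. The block
	// below clamps 0.5*(ratio - 1) to [0, 1] per axis (0fBF800000 = -1.0,
	// 0f3F000000 = 0.5, 0f3F800000 = 1.0), derives sampling offsets
	// d = w / (w + 0.5), and centres the source coordinate at
	// (xo + 0.5) * ratio on each axis.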
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
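	// %f5/%f7 = x_centre -/+ dx and %f4/%f8 = y_centre -/+ dy: the four fetches
	// below sample the 2x2 neighbourhood, keeping only the low 8 bits of the
	// first component of each result.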
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
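	// (sum + 2) >> 2 is the rounded average of the four samples; it is stored
	// as a single byte at dst + yo * dst_pitch + xo.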
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB80_2:
	ret;

}
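//
// The bilinear kernels in the rest of this module all follow the pattern above.
// As a reading aid, a rough CUDA-level reconstruction of the 8-bit single-plane
// case is sketched below. It is inferred from the PTX, not taken from the
// original source; the parameter names and the texel type used in tex2D are
// assumptions.
//
//   __global__ void Subsample_Bilinear_sketch(cudaTextureObject_t src_tex,
//                                             unsigned char *dst,
//                                             int dst_width, int dst_height,
//                                             int dst_pitch,
//                                             int src_width, int src_height)
//   {
//       int xo = blockIdx.x * blockDim.x + threadIdx.x;
//       int yo = blockIdx.y * blockDim.y + threadIdx.y;
//       if (xo >= dst_width || yo >= dst_height)
//           return;
//       float hscale = (float)src_width  / dst_width;
//       float vscale = (float)src_height / dst_height;
//       float wh = fminf(fmaxf(0.5f * (hscale - 1.0f), 0.0f), 1.0f);
//       float wv = fminf(fmaxf(0.5f * (vscale - 1.0f), 0.0f), 1.0f);
//       float dx = wh / (wh + 0.5f), dy = wv / (wv + 0.5f);
//       float xi = (xo + 0.5f) * hscale, yi = (yo + 0.5f) * vscale;
//       unsigned sum = 2;                       // rounding bias before >> 2
//       sum += tex2D<unsigned char>(src_tex, xi - dx, yi - dy);
//       sum += tex2D<unsigned char>(src_tex, xi + dx, yi - dy);
//       sum += tex2D<unsigned char>(src_tex, xi - dx, yi + dy);
//       sum += tex2D<unsigned char>(src_tex, xi + dx, yi + dy);
//       dst[yo * dst_pitch + xo] = (unsigned char)(sum >> 2);
//   }
//
// The 16-bit source variants (p010le / p016le / yuv444p16le) mask each sample
// with 0xffff and shift by 10 instead of 2; the *_uv variants repeat the same
// computation for the chroma texture(s) and plane(s).
//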
	// .globl	Subsample_Bilinear_yuv420p_yuv420p_uv
.visible .entry Subsample_Bilinear_yuv420p_yuv420p_uv(
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_yuv420p_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_yuv420p_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_yuv420p_uv_param_13
)
{
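	// UV variant: the same sampling arithmetic as the luma kernel, applied twice.
	// It reads from two source textures (params 1 and 2) and writes one averaged
	// byte to each of two destination planes (params 5 and 6) at the same offset.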
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<20>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB81_2;
	bra.uni 	$L__BB81_1;
$L__BB81_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv420p_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	mul.wide.s32 	%rd15, %r2, %r5;
	cvt.s64.s32 	%rd16, %r1;
	add.s64 	%rd17, %rd15, %rd16;
	add.s64 	%rd18, %rd2, %rd17;
	st.global.u8 	[%rd18], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 255;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 255;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	add.s64 	%rd19, %rd1, %rd17;
	st.global.u8 	[%rd19], %r66;
$L__BB81_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_yuv420p
.visible .entry Subsample_Bilinear_nv12_yuv420p(
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_0,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_1,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_2,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_3,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_4,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_5,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_6,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_param_7,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_param_8,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_param_9,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_param_10,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_param_11,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_param_12,
	.param .f32 Subsample_Bilinear_nv12_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB82_2;
	bra.uni 	$L__BB82_1;
$L__BB82_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB82_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_yuv420p_uv
.visible .entry Subsample_Bilinear_nv12_yuv420p_uv(
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_0,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_1,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_2,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_3,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_4,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_5,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_6,
	.param .u64 Subsample_Bilinear_nv12_yuv420p_uv_param_7,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_uv_param_8,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_uv_param_9,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_uv_param_10,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_uv_param_11,
	.param .u32 Subsample_Bilinear_nv12_yuv420p_uv_param_12,
	.param .f32 Subsample_Bilinear_nv12_yuv420p_uv_param_13
)
{
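	// nv12 source chroma: a single interleaved UV texture (param 1) is sampled
	// four times; the first and second components are accumulated separately and
	// written as one byte each to the two planar outputs (params 5 and 6).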
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB83_2;
	bra.uni 	$L__BB83_1;
$L__BB83_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_nv12_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_nv12_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 255;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 255;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 255;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 255;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	mul.wide.s32 	%rd10, %r2, %r5;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	add.s64 	%rd13, %rd2, %rd12;
	st.global.u8 	[%rd13], %r49;
	add.s64 	%rd14, %rd1, %rd12;
	st.global.u8 	[%rd14], %r50;
$L__BB83_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_yuv420p
.visible .entry Subsample_Bilinear_yuv444p_yuv420p(
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_yuv420p_param_13
)
{
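	// yuv444p -> yuv420p: the body is instruction-for-instruction identical to
	// Subsample_Bilinear_yuv420p_yuv420p above; the format difference is handled
	// entirely by the textures, pointers and sizes the host passes as parameters.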
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB84_2;
	bra.uni 	$L__BB84_1;
$L__BB84_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB84_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_yuv420p_uv
.visible .entry Subsample_Bilinear_yuv444p_yuv420p_uv(
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_yuv420p_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_yuv420p_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<20>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB85_2;
	bra.uni 	$L__BB85_1;
$L__BB85_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv444p_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	mul.wide.s32 	%rd15, %r2, %r5;
	cvt.s64.s32 	%rd16, %r1;
	add.s64 	%rd17, %rd15, %rd16;
	add.s64 	%rd18, %rd2, %rd17;
	st.global.u8 	[%rd18], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 255;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 255;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	add.s64 	%rd19, %rd1, %rd17;
	st.global.u8 	[%rd19], %r66;
$L__BB85_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_yuv420p
.visible .entry Subsample_Bilinear_p010le_yuv420p(
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_0,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_1,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_2,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_3,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_4,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_5,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_6,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_param_7,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_param_8,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_param_9,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_param_10,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_param_11,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_param_12,
	.param .f32 Subsample_Bilinear_p010le_yuv420p_param_13
)
{
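	// 16-bit source (p010le): same 4-tap average, but each sample is masked with
	// 0xffff and the final shift is 10 (>> 2 to average the four samples, then
	// >> 8 to narrow the 16-bit data to the 8-bit yuv420p output).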
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB86_2;
	bra.uni 	$L__BB86_1;
$L__BB86_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB86_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_yuv420p_uv
.visible .entry Subsample_Bilinear_p010le_yuv420p_uv(
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_0,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_1,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_2,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_3,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_4,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_5,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_6,
	.param .u64 Subsample_Bilinear_p010le_yuv420p_uv_param_7,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_uv_param_8,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_uv_param_9,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_uv_param_10,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_uv_param_11,
	.param .u32 Subsample_Bilinear_p010le_yuv420p_uv_param_12,
	.param .f32 Subsample_Bilinear_p010le_yuv420p_uv_param_13
)
{
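	// 16-bit interleaved chroma (p010le): combines the two patterns above, i.e.
	// one UV texture (param 1), separate sums for the two components, 0xffff
	// masks and a shift by 10 before the two single-byte stores.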
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB87_2;
	bra.uni 	$L__BB87_1;
$L__BB87_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_p010le_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_p010le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 10;
	mul.wide.s32 	%rd10, %r2, %r5;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	add.s64 	%rd13, %rd2, %rd12;
	st.global.u8 	[%rd13], %r49;
	shr.u32 	%r50, %r48, 10;
	add.s64 	%rd14, %rd1, %rd12;
	st.global.u8 	[%rd14], %r50;
$L__BB87_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_yuv420p
.visible .entry Subsample_Bilinear_p016le_yuv420p(
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_0,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_1,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_2,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_3,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_4,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_5,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_6,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_param_7,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_param_8,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_param_9,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_param_10,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_param_11,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_param_12,
	.param .f32 Subsample_Bilinear_p016le_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB88_2;
	bra.uni 	$L__BB88_1;
$L__BB88_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB88_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_yuv420p_uv
.visible .entry Subsample_Bilinear_p016le_yuv420p_uv(
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_0,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_1,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_2,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_3,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_4,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_5,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_6,
	.param .u64 Subsample_Bilinear_p016le_yuv420p_uv_param_7,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_uv_param_8,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_uv_param_9,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_uv_param_10,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_uv_param_11,
	.param .u32 Subsample_Bilinear_p016le_yuv420p_uv_param_12,
	.param .f32 Subsample_Bilinear_p016le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB89_2;
	bra.uni 	$L__BB89_1;
$L__BB89_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_p016le_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_p016le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 10;
	mul.wide.s32 	%rd10, %r2, %r5;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	add.s64 	%rd13, %rd2, %rd12;
	st.global.u8 	[%rd13], %r49;
	shr.u32 	%r50, %r48, 10;
	add.s64 	%rd14, %rd1, %rd12;
	st.global.u8 	[%rd14], %r50;
$L__BB89_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_yuv420p
.visible .entry Subsample_Bilinear_yuv444p16le_yuv420p(
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_yuv420p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB90_2;
	bra.uni 	$L__BB90_1;
$L__BB90_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p16le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB90_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_yuv420p_uv
.visible .entry Subsample_Bilinear_yuv444p16le_yuv420p_uv(
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<20>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB91_2;
	bra.uni 	$L__BB91_1;
$L__BB91_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv444p16le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 65535;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 65535;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 65535;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 65535;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 10;
	mul.wide.s32 	%rd15, %r2, %r5;
	cvt.s64.s32 	%rd16, %r1;
	add.s64 	%rd17, %rd15, %rd16;
	add.s64 	%rd18, %rd2, %rd17;
	st.global.u8 	[%rd18], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 65535;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 65535;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 65535;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 65535;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 10;
	add.s64 	%rd19, %rd1, %rd17;
	st.global.u8 	[%rd19], %r66;
$L__BB91_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_nv12
.visible .entry Subsample_Bilinear_yuv420p_nv12(
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB92_2;
	bra.uni 	$L__BB92_1;
$L__BB92_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv420p_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB92_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_nv12_uv
.visible .entry Subsample_Bilinear_yuv420p_nv12_uv(
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_nv12_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_nv12_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_nv12_uv_param_13
)
{
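	// nv12 destination chroma: U and V (textures in params 1 and 2) are averaged
	// separately, then packed and written with a single 2-byte store at
	// dst + (yo * (dst_pitch >> 1) + xo) * 2, i.e. interleaved UV output.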
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB93_2;
	bra.uni 	$L__BB93_1;
$L__BB93_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_nv12_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv420p_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv420p_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	cvt.u16.u32 	%rs1, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 255;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 255;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	cvt.u16.u32 	%rs2, %r66;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 1;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 1;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u8 	[%rd20], {%rs1, %rs2};
$L__BB93_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_nv12
.visible .entry Subsample_Bilinear_nv12_nv12(
	.param .u64 Subsample_Bilinear_nv12_nv12_param_0,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_1,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_2,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_3,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_4,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_5,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_6,
	.param .u64 Subsample_Bilinear_nv12_nv12_param_7,
	.param .u32 Subsample_Bilinear_nv12_nv12_param_8,
	.param .u32 Subsample_Bilinear_nv12_nv12_param_9,
	.param .u32 Subsample_Bilinear_nv12_nv12_param_10,
	.param .u32 Subsample_Bilinear_nv12_nv12_param_11,
	.param .u32 Subsample_Bilinear_nv12_nv12_param_12,
	.param .f32 Subsample_Bilinear_nv12_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB94_2;
	bra.uni 	$L__BB94_1;
$L__BB94_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB94_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_nv12_uv
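// Interleaved NV12 chroma variant: the source texture is param_1 and both the
// .x and .y components of each tap are masked to 8 bits and averaged separately.
// The resulting pair is written with st.global.v2.u8 at a byte offset of
// 2*(y*(param_10/2) + x) into param_5.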
.visible .entry Subsample_Bilinear_nv12_nv12_uv(
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_0,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_1,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_2,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_3,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_4,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_5,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_6,
	.param .u64 Subsample_Bilinear_nv12_nv12_uv_param_7,
	.param .u32 Subsample_Bilinear_nv12_nv12_uv_param_8,
	.param .u32 Subsample_Bilinear_nv12_nv12_uv_param_9,
	.param .u32 Subsample_Bilinear_nv12_nv12_uv_param_10,
	.param .u32 Subsample_Bilinear_nv12_nv12_uv_param_11,
	.param .u32 Subsample_Bilinear_nv12_nv12_uv_param_12,
	.param .f32 Subsample_Bilinear_nv12_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB95_2;
	bra.uni 	$L__BB95_1;
$L__BB95_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 255;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 255;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 255;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 255;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.u16.u32 	%rs1, %r49;
	cvt.u16.u32 	%rs2, %r50;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u8 	[%rd15], {%rs1, %rs2};
$L__BB95_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_nv12
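// yuv444p -> nv12, first plane (presumably Y): same 8-bit single-plane path as
// Subsample_Bilinear_nv12_nv12 above (texture param_0, byte store into param_4).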
.visible .entry Subsample_Bilinear_yuv444p_nv12(
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB96_2;
	bra.uni 	$L__BB96_1;
$L__BB96_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB96_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_nv12_uv
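// yuv444p -> nv12 chroma: U and V come from two separate source textures
// (param_1 and param_2). Each gets its own 4-tap 8-bit average and the two
// results are stored as an interleaved byte pair into the NV12 UV plane
// (param_5), at the same 2*(y*(param_10/2) + x) offset used above.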
.visible .entry Subsample_Bilinear_yuv444p_nv12_uv(
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_nv12_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_nv12_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB97_2;
	bra.uni 	$L__BB97_1;
$L__BB97_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_nv12_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv444p_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	cvt.u16.u32 	%rs1, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 255;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 255;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	cvt.u16.u32 	%rs2, %r66;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 1;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 1;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u8 	[%rd20], {%rs1, %rs2};
$L__BB97_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_nv12
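// 16-bit source variant (p010le): taps are masked with 0xFFFF instead of 0xFF
// and the sum is shifted right by 10 rather than 2 -- the 4-tap average (>> 2)
// folded together with dropping the low 8 bits, reducing the 16-bit sample to
// the 8-bit NV12 range before the single-byte store.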
.visible .entry Subsample_Bilinear_p010le_nv12(
	.param .u64 Subsample_Bilinear_p010le_nv12_param_0,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_1,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_2,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_3,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_4,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_5,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_6,
	.param .u64 Subsample_Bilinear_p010le_nv12_param_7,
	.param .u32 Subsample_Bilinear_p010le_nv12_param_8,
	.param .u32 Subsample_Bilinear_p010le_nv12_param_9,
	.param .u32 Subsample_Bilinear_p010le_nv12_param_10,
	.param .u32 Subsample_Bilinear_p010le_nv12_param_11,
	.param .u32 Subsample_Bilinear_p010le_nv12_param_12,
	.param .f32 Subsample_Bilinear_p010le_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB98_2;
	bra.uni 	$L__BB98_1;
$L__BB98_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB98_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_nv12_uv
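// p010le chroma: both interleaved 16-bit components of texture param_1 get the
// 0xFFFF mask and >> 10 reduction, then are stored as a v2.u8 pair into param_5.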
.visible .entry Subsample_Bilinear_p010le_nv12_uv(
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_0,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_1,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_2,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_3,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_4,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_5,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_6,
	.param .u64 Subsample_Bilinear_p010le_nv12_uv_param_7,
	.param .u32 Subsample_Bilinear_p010le_nv12_uv_param_8,
	.param .u32 Subsample_Bilinear_p010le_nv12_uv_param_9,
	.param .u32 Subsample_Bilinear_p010le_nv12_uv_param_10,
	.param .u32 Subsample_Bilinear_p010le_nv12_uv_param_11,
	.param .u32 Subsample_Bilinear_p010le_nv12_uv_param_12,
	.param .f32 Subsample_Bilinear_p010le_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB99_2;
	bra.uni 	$L__BB99_1;
$L__BB99_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 10;
	cvt.u16.u32 	%rs1, %r49;
	shr.u32 	%r50, %r48, 10;
	cvt.u16.u32 	%rs2, %r50;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u8 	[%rd15], {%rs1, %rs2};
$L__BB99_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_nv12
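// p016le input shares the same 16-bit code path as Subsample_Bilinear_p010le_nv12;
// only the kernel name differs.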
.visible .entry Subsample_Bilinear_p016le_nv12(
	.param .u64 Subsample_Bilinear_p016le_nv12_param_0,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_1,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_2,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_3,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_4,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_5,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_6,
	.param .u64 Subsample_Bilinear_p016le_nv12_param_7,
	.param .u32 Subsample_Bilinear_p016le_nv12_param_8,
	.param .u32 Subsample_Bilinear_p016le_nv12_param_9,
	.param .u32 Subsample_Bilinear_p016le_nv12_param_10,
	.param .u32 Subsample_Bilinear_p016le_nv12_param_11,
	.param .u32 Subsample_Bilinear_p016le_nv12_param_12,
	.param .f32 Subsample_Bilinear_p016le_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB100_2;
	bra.uni 	$L__BB100_1;
$L__BB100_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB100_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_nv12_uv
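// Likewise identical to Subsample_Bilinear_p010le_nv12_uv apart from the name.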
.visible .entry Subsample_Bilinear_p016le_nv12_uv(
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_0,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_1,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_2,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_3,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_4,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_5,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_6,
	.param .u64 Subsample_Bilinear_p016le_nv12_uv_param_7,
	.param .u32 Subsample_Bilinear_p016le_nv12_uv_param_8,
	.param .u32 Subsample_Bilinear_p016le_nv12_uv_param_9,
	.param .u32 Subsample_Bilinear_p016le_nv12_uv_param_10,
	.param .u32 Subsample_Bilinear_p016le_nv12_uv_param_11,
	.param .u32 Subsample_Bilinear_p016le_nv12_uv_param_12,
	.param .f32 Subsample_Bilinear_p016le_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB101_2;
	bra.uni 	$L__BB101_1;
$L__BB101_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 10;
	cvt.u16.u32 	%rs1, %r49;
	shr.u32 	%r50, %r48, 10;
	cvt.u16.u32 	%rs2, %r50;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u8 	[%rd15], {%rs1, %rs2};
$L__BB101_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_nv12
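// 16-bit planar source, first plane: same 0xFFFF mask and >> 10 reduction as the
// p010le/p016le kernels, single-byte store at param_4 + y*param_10 + x.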
.visible .entry Subsample_Bilinear_yuv444p16le_nv12(
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_nv12_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB102_2;
	bra.uni 	$L__BB102_1;
$L__BB102_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p16le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB102_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_nv12_uv
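// 16-bit planar chroma: U and V are read from two separate textures (param_1 and
// param_2), each reduced with the >> 10 shift, and written as an interleaved
// 8-bit pair into param_5.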
.visible .entry Subsample_Bilinear_yuv444p16le_nv12_uv(
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_nv12_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_nv12_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_nv12_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB103_2;
	bra.uni 	$L__BB103_1;
$L__BB103_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 65535;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 65535;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 65535;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 65535;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 10;
	cvt.u16.u32 	%rs1, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 65535;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 65535;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 65535;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 65535;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 10;
	cvt.u16.u32 	%rs2, %r66;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 1;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 1;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u8 	[%rd20], {%rs1, %rs2};
$L__BB103_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_yuv444p
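// 8-bit plane-to-plane scaler: same 4-tap rounded average, one byte per
// destination pixel at param_4 + y*param_10 + x.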
.visible .entry Subsample_Bilinear_yuv420p_yuv444p(
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB104_2;
	bra.uni 	$L__BB104_1;
$L__BB104_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv420p_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB104_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_yuv444p_uv
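// Planar-to-planar chroma: U and V are read from two textures (param_1 and
// param_2) and written to two separate destination planes (param_5 and param_6)
// at the same y*param_10 + x byte offset, i.e. planar output instead of the
// interleaved NV12 layout used earlier.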
.visible .entry Subsample_Bilinear_yuv420p_yuv444p_uv(
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<20>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB105_2;
	bra.uni 	$L__BB105_1;
$L__BB105_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv420p_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	mul.wide.s32 	%rd15, %r2, %r5;
	cvt.s64.s32 	%rd16, %r1;
	add.s64 	%rd17, %rd15, %rd16;
	add.s64 	%rd18, %rd2, %rd17;
	st.global.u8 	[%rd18], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 255;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 255;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	add.s64 	%rd19, %rd1, %rd17;
	st.global.u8 	[%rd19], %r66;
$L__BB105_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_yuv444p
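// nv12 -> yuv444p, first plane: standard 8-bit single-plane path (texture
// param_0, byte store into param_4).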
.visible .entry Subsample_Bilinear_nv12_yuv444p(
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_0,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_1,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_2,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_3,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_4,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_5,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_6,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_param_7,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_param_8,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_param_9,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_param_10,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_param_11,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_param_12,
	.param .f32 Subsample_Bilinear_nv12_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB106_2;
	bra.uni 	$L__BB106_1;
$L__BB106_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB106_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_yuv444p_uv
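// De-interleaving chroma path: a single UV texture (param_1) supplies both
// components (.x and .y of each tap); the two averages are stored as single
// bytes into separate planes param_5 and param_6 at y*param_10 + x.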
.visible .entry Subsample_Bilinear_nv12_yuv444p_uv(
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_0,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_1,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_2,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_3,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_4,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_5,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_6,
	.param .u64 Subsample_Bilinear_nv12_yuv444p_uv_param_7,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_uv_param_8,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_uv_param_9,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_uv_param_10,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_uv_param_11,
	.param .u32 Subsample_Bilinear_nv12_yuv444p_uv_param_12,
	.param .f32 Subsample_Bilinear_nv12_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB107_2;
	bra.uni 	$L__BB107_1;
$L__BB107_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_nv12_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_nv12_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 255;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 255;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 255;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 255;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	mul.wide.s32 	%rd10, %r2, %r5;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	add.s64 	%rd13, %rd2, %rd12;
	st.global.u8 	[%rd13], %r49;
	add.s64 	%rd14, %rd1, %rd12;
	st.global.u8 	[%rd14], %r50;
$L__BB107_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_yuv444p
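	// Bilinear subsample kernel (8-bit planar source, 8-bit planar destination).
	// Each thread maps %ntid*%ctaid+%tid to one output pixel and returns early when
	// it falls outside param_8 x param_9 (which appear to be the destination
	// width/height). It scales the pixel centre by param_11/param_8 and
	// param_12/param_9, takes four tex.2d taps around that position, rounds the
	// average as (sum + 2) >> 2, and stores one byte at param_4 + y*param_10 + x.
	// The remaining Subsample_Bilinear_* entries below follow this same pattern.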
.visible .entry Subsample_Bilinear_yuv444p_yuv444p(
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB108_2;
	bra.uni 	$L__BB108_1;
$L__BB108_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB108_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_yuv444p_uv
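	// _uv variant for planar chroma: identical addressing, but it samples two
	// source textures (param_1 and param_2) at the same coordinates and writes the
	// two rounded averages to separate byte planes at param_5 and param_6.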
.visible .entry Subsample_Bilinear_yuv444p_yuv444p_uv(
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<20>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB109_2;
	bra.uni 	$L__BB109_1;
$L__BB109_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv444p_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	mul.wide.s32 	%rd15, %r2, %r5;
	cvt.s64.s32 	%rd16, %r1;
	add.s64 	%rd17, %rd15, %rd16;
	add.s64 	%rd18, %rd2, %rd17;
	st.global.u8 	[%rd18], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 255;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 255;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	add.s64 	%rd19, %rd1, %rd17;
	st.global.u8 	[%rd19], %r66;
$L__BB109_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_yuv444p
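	// 16-bit source variant: texture reads are masked with 65535 instead of 255 and
	// the rounded sum is shifted right by 10 rather than 2, so the four 16-bit
	// samples are averaged and reduced to their 8 most significant bits in one step
	// before the single-byte store.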
.visible .entry Subsample_Bilinear_p010le_yuv444p(
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_0,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_1,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_2,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_3,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_4,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_5,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_6,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_param_7,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_param_8,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_param_9,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_param_10,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_param_11,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_param_12,
	.param .f32 Subsample_Bilinear_p010le_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB110_2;
	bra.uni 	$L__BB110_1;
$L__BB110_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB110_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_yuv444p_uv
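	// Interleaved-chroma variant: a single texture (param_1) supplies both chroma
	// components, so each tap contributes its first two channels (%r17/%r18, etc.);
	// the two running sums are averaged separately and stored as single bytes to
	// the planes at param_5 and param_6.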
.visible .entry Subsample_Bilinear_p010le_yuv444p_uv(
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_0,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_1,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_2,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_3,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_4,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_5,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_6,
	.param .u64 Subsample_Bilinear_p010le_yuv444p_uv_param_7,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_uv_param_8,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_uv_param_9,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_uv_param_10,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_uv_param_11,
	.param .u32 Subsample_Bilinear_p010le_yuv444p_uv_param_12,
	.param .f32 Subsample_Bilinear_p010le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB111_2;
	bra.uni 	$L__BB111_1;
$L__BB111_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_p010le_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_p010le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 10;
	mul.wide.s32 	%rd10, %r2, %r5;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	add.s64 	%rd13, %rd2, %rd12;
	st.global.u8 	[%rd13], %r49;
	shr.u32 	%r50, %r48, 10;
	add.s64 	%rd14, %rd1, %rd12;
	st.global.u8 	[%rd14], %r50;
$L__BB111_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_yuv444p
.visible .entry Subsample_Bilinear_p016le_yuv444p(
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_0,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_1,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_2,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_3,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_4,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_5,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_6,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_param_7,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_param_8,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_param_9,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_param_10,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_param_11,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_param_12,
	.param .f32 Subsample_Bilinear_p016le_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB112_2;
	bra.uni 	$L__BB112_1;
$L__BB112_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB112_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_yuv444p_uv
.visible .entry Subsample_Bilinear_p016le_yuv444p_uv(
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_0,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_1,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_2,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_3,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_4,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_5,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_6,
	.param .u64 Subsample_Bilinear_p016le_yuv444p_uv_param_7,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_uv_param_8,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_uv_param_9,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_uv_param_10,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_uv_param_11,
	.param .u32 Subsample_Bilinear_p016le_yuv444p_uv_param_12,
	.param .f32 Subsample_Bilinear_p016le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<15>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB113_2;
	bra.uni 	$L__BB113_1;
$L__BB113_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_p016le_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_p016le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 10;
	mul.wide.s32 	%rd10, %r2, %r5;
	cvt.s64.s32 	%rd11, %r1;
	add.s64 	%rd12, %rd10, %rd11;
	add.s64 	%rd13, %rd2, %rd12;
	st.global.u8 	[%rd13], %r49;
	shr.u32 	%r50, %r48, 10;
	add.s64 	%rd14, %rd1, %rd12;
	st.global.u8 	[%rd14], %r50;
$L__BB113_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_yuv444p
.visible .entry Subsample_Bilinear_yuv444p16le_yuv444p(
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_yuv444p_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB114_2;
	bra.uni 	$L__BB114_1;
$L__BB114_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p16le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 10;
	mul.wide.s32 	%rd8, %r2, %r5;
	cvt.s64.s32 	%rd9, %r1;
	add.s64 	%rd10, %rd8, %rd9;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.u8 	[%rd11], %r41;
$L__BB114_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_yuv444p_uv
.visible .entry Subsample_Bilinear_yuv444p16le_yuv444p_uv(
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<20>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB115_2;
	bra.uni 	$L__BB115_1;
$L__BB115_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv444p16le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 65535;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 65535;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 65535;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 65535;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 10;
	mul.wide.s32 	%rd15, %r2, %r5;
	cvt.s64.s32 	%rd16, %r1;
	add.s64 	%rd17, %rd15, %rd16;
	add.s64 	%rd18, %rd2, %rd17;
	st.global.u8 	[%rd18], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 65535;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 65535;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 65535;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 65535;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 10;
	add.s64 	%rd19, %rd1, %rd17;
	st.global.u8 	[%rd19], %r66;
$L__BB115_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_p010le
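	// 8-bit source to 16-bit (p010le) destination: the usual (sum + 2) >> 2 average
	// is widened by multiplying with 257 and masking with 0xFFC0 (-64 as .s16),
	// which places the value in the upper 10 bits; the result is stored as u16,
	// with the byte pitch from param_10 halved to an element stride.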
.visible .entry Subsample_Bilinear_yuv420p_p010le(
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<6>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB116_2;
	bra.uni 	$L__BB116_1;
$L__BB116_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv420p_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs4, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs5;
$L__BB116_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_p010le_uv
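	// _uv variant with p010le output: both chroma averages go through the same
	// *257 / 0xFFC0 widening and are written together as an interleaved pair with
	// st.global.v2.u16; the byte pitch from param_10 is divided by four to give the
	// per-row stride in UV pairs.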
.visible .entry Subsample_Bilinear_yuv420p_p010le_uv(
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_p010le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_p010le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<11>;
	.reg .b32 	%r<63>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB117_2;
	bra.uni 	$L__BB117_1;
$L__BB117_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_p010le_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv420p_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv420p_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	cvt.u16.u32 	%rs1, %r55;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs4, -64;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r56, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r57, %r37, 255;
	add.s32 	%r58, %r56, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r59, %r41, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r61, %r45, 255;
	add.s32 	%r62, %r60, %r61;
	cvt.u16.u32 	%rs6, %r62;
	add.s16 	%rs7, %rs6, 2;
	shr.u16 	%rs8, %rs7, 2;
	mul.lo.s16 	%rs9, %rs8, 257;
	and.b16  	%rs10, %rs9, -64;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 2;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 2;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u16 	[%rd20], {%rs5, %rs10};
$L__BB117_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_p010le
.visible .entry Subsample_Bilinear_nv12_p010le(
	.param .u64 Subsample_Bilinear_nv12_p010le_param_0,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_1,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_2,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_3,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_4,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_5,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_6,
	.param .u64 Subsample_Bilinear_nv12_p010le_param_7,
	.param .u32 Subsample_Bilinear_nv12_p010le_param_8,
	.param .u32 Subsample_Bilinear_nv12_p010le_param_9,
	.param .u32 Subsample_Bilinear_nv12_p010le_param_10,
	.param .u32 Subsample_Bilinear_nv12_p010le_param_11,
	.param .u32 Subsample_Bilinear_nv12_p010le_param_12,
	.param .f32 Subsample_Bilinear_nv12_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<6>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB118_2;
	bra.uni 	$L__BB118_1;
$L__BB118_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs4, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs5;
$L__BB118_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_p010le_uv
.visible .entry Subsample_Bilinear_nv12_p010le_uv(
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_0,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_1,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_2,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_3,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_4,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_5,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_6,
	.param .u64 Subsample_Bilinear_nv12_p010le_uv_param_7,
	.param .u32 Subsample_Bilinear_nv12_p010le_uv_param_8,
	.param .u32 Subsample_Bilinear_nv12_p010le_uv_param_9,
	.param .u32 Subsample_Bilinear_nv12_p010le_uv_param_10,
	.param .u32 Subsample_Bilinear_nv12_p010le_uv_param_11,
	.param .u32 Subsample_Bilinear_nv12_p010le_uv_param_12,
	.param .f32 Subsample_Bilinear_nv12_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<11>;
	.reg .b32 	%r<47>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB119_2;
	bra.uni 	$L__BB119_1;
$L__BB119_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 255;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 255;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 255;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 255;
	add.s32 	%r46, %r42, %r45;
	cvt.u16.u32 	%rs1, %r44;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	cvt.u16.u32 	%rs4, %r46;
	add.s16 	%rs5, %rs4, 2;
	shr.u16 	%rs6, %rs5, 2;
	mul.lo.s16 	%rs7, %rs3, 257;
	and.b16  	%rs8, %rs7, -64;
	mul.lo.s16 	%rs9, %rs6, 257;
	and.b16  	%rs10, %rs9, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u16 	[%rd15], {%rs8, %rs10};
$L__BB119_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_p010le
.visible .entry Subsample_Bilinear_yuv444p_p010le(
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<6>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB120_2;
	bra.uni 	$L__BB120_1;
$L__BB120_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs4, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs5;
$L__BB120_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_p010le_uv
.visible .entry Subsample_Bilinear_yuv444p_p010le_uv(
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_p010le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_p010le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<11>;
	.reg .b32 	%r<63>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB121_2;
	bra.uni 	$L__BB121_1;
$L__BB121_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_p010le_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv444p_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	cvt.u16.u32 	%rs1, %r55;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	and.b16  	%rs5, %rs4, -64;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r56, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r57, %r37, 255;
	add.s32 	%r58, %r56, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r59, %r41, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r61, %r45, 255;
	add.s32 	%r62, %r60, %r61;
	cvt.u16.u32 	%rs6, %r62;
	add.s16 	%rs7, %rs6, 2;
	shr.u16 	%rs8, %rs7, 2;
	mul.lo.s16 	%rs9, %rs8, 257;
	and.b16  	%rs10, %rs9, -64;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 2;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 2;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u16 	[%rd20], {%rs5, %rs10};
$L__BB121_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_p010le
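	// Luma pass for p010le -> p010le: four 16-bit texture taps are summed with
	// +2 rounding, averaged (>>2), and stored directly as u16; no bit-depth
	// conversion is needed since source and destination formats match.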
.visible .entry Subsample_Bilinear_p010le_p010le(
	.param .u64 Subsample_Bilinear_p010le_p010le_param_0,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_1,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_2,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_3,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_4,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_5,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_6,
	.param .u64 Subsample_Bilinear_p010le_p010le_param_7,
	.param .u32 Subsample_Bilinear_p010le_p010le_param_8,
	.param .u32 Subsample_Bilinear_p010le_p010le_param_9,
	.param .u32 Subsample_Bilinear_p010le_p010le_param_10,
	.param .u32 Subsample_Bilinear_p010le_p010le_param_11,
	.param .u32 Subsample_Bilinear_p010le_p010le_param_12,
	.param .f32 Subsample_Bilinear_p010le_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB122_2;
	bra.uni 	$L__BB122_1;
$L__BB122_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %r41;
$L__BB122_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_p010le_uv
.visible .entry Subsample_Bilinear_p010le_p010le_uv(
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_0,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_1,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_2,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_3,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_4,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_5,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_6,
	.param .u64 Subsample_Bilinear_p010le_p010le_uv_param_7,
	.param .u32 Subsample_Bilinear_p010le_p010le_uv_param_8,
	.param .u32 Subsample_Bilinear_p010le_p010le_uv_param_9,
	.param .u32 Subsample_Bilinear_p010le_p010le_uv_param_10,
	.param .u32 Subsample_Bilinear_p010le_p010le_uv_param_11,
	.param .u32 Subsample_Bilinear_p010le_p010le_uv_param_12,
	.param .f32 Subsample_Bilinear_p010le_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB123_2;
	bra.uni 	$L__BB123_1;
$L__BB123_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.u16.u32 	%rs1, %r49;
	cvt.u16.u32 	%rs2, %r50;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u16 	[%rd15], {%rs1, %rs2};
$L__BB123_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_p010le
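	// Luma pass for p016le -> p010le: the rounded average of four 16-bit taps
	// is masked with & 0xFFC0 to clear the low 6 bits, packing the result into
	// p010le's MSB-aligned 10-bit layout.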
.visible .entry Subsample_Bilinear_p016le_p010le(
	.param .u64 Subsample_Bilinear_p016le_p010le_param_0,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_1,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_2,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_3,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_4,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_5,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_6,
	.param .u64 Subsample_Bilinear_p016le_p010le_param_7,
	.param .u32 Subsample_Bilinear_p016le_p010le_param_8,
	.param .u32 Subsample_Bilinear_p016le_p010le_param_9,
	.param .u32 Subsample_Bilinear_p016le_p010le_param_10,
	.param .u32 Subsample_Bilinear_p016le_p010le_param_11,
	.param .u32 Subsample_Bilinear_p016le_p010le_param_12,
	.param .f32 Subsample_Bilinear_p016le_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB124_2;
	bra.uni 	$L__BB124_1;
$L__BB124_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.u16.u32 	%rs1, %r41;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs2;
$L__BB124_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_p010le_uv
.visible .entry Subsample_Bilinear_p016le_p010le_uv(
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_0,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_1,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_2,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_3,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_4,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_5,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_6,
	.param .u64 Subsample_Bilinear_p016le_p010le_uv_param_7,
	.param .u32 Subsample_Bilinear_p016le_p010le_uv_param_8,
	.param .u32 Subsample_Bilinear_p016le_p010le_uv_param_9,
	.param .u32 Subsample_Bilinear_p016le_p010le_uv_param_10,
	.param .u32 Subsample_Bilinear_p016le_p010le_uv_param_11,
	.param .u32 Subsample_Bilinear_p016le_p010le_uv_param_12,
	.param .f32 Subsample_Bilinear_p016le_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB125_2;
	bra.uni 	$L__BB125_1;
$L__BB125_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.u16.u32 	%rs1, %r49;
	cvt.u16.u32 	%rs2, %r50;
	and.b16  	%rs3, %rs1, -64;
	and.b16  	%rs4, %rs2, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u16 	[%rd15], {%rs3, %rs4};
$L__BB125_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_p010le
.visible .entry Subsample_Bilinear_yuv444p16le_p010le(
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_p010le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB126_2;
	bra.uni 	$L__BB126_1;
$L__BB126_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p16le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.u16.u32 	%rs1, %r41;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs2;
$L__BB126_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_p010le_uv
.visible .entry Subsample_Bilinear_yuv444p16le_p010le_uv(
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_p010le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_p010le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_p010le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB127_2;
	bra.uni 	$L__BB127_1;
$L__BB127_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 65535;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 65535;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 65535;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 65535;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	cvt.u16.u32 	%rs1, %r57;
	and.b16  	%rs2, %rs1, -64;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 65535;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 65535;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 65535;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 65535;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	cvt.u16.u32 	%rs3, %r66;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 2;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 2;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u16 	[%rd20], {%rs2, %rs4};
$L__BB127_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_p016le
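	// Luma pass for 8-bit yuv420p -> p016le: the rounded average of four 8-bit
	// taps is widened to the full 16-bit range by multiplying by 257,
	// replicating the byte into both halves of the output word.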
.visible .entry Subsample_Bilinear_yuv420p_p016le(
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB128_2;
	bra.uni 	$L__BB128_1;
$L__BB128_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv420p_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs4;
$L__BB128_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_p016le_uv
.visible .entry Subsample_Bilinear_yuv420p_p016le_uv(
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_p016le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_p016le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<63>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB129_2;
	bra.uni 	$L__BB129_1;
$L__BB129_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_p016le_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv420p_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv420p_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	cvt.u16.u32 	%rs1, %r55;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r56, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r57, %r37, 255;
	add.s32 	%r58, %r56, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r59, %r41, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r61, %r45, 255;
	add.s32 	%r62, %r60, %r61;
	cvt.u16.u32 	%rs5, %r62;
	add.s16 	%rs6, %rs5, 2;
	shr.u16 	%rs7, %rs6, 2;
	mul.lo.s16 	%rs8, %rs7, 257;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 2;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 2;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u16 	[%rd20], {%rs4, %rs8};
$L__BB129_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_p016le
.visible .entry Subsample_Bilinear_nv12_p016le(
	.param .u64 Subsample_Bilinear_nv12_p016le_param_0,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_1,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_2,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_3,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_4,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_5,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_6,
	.param .u64 Subsample_Bilinear_nv12_p016le_param_7,
	.param .u32 Subsample_Bilinear_nv12_p016le_param_8,
	.param .u32 Subsample_Bilinear_nv12_p016le_param_9,
	.param .u32 Subsample_Bilinear_nv12_p016le_param_10,
	.param .u32 Subsample_Bilinear_nv12_p016le_param_11,
	.param .u32 Subsample_Bilinear_nv12_p016le_param_12,
	.param .f32 Subsample_Bilinear_nv12_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB130_2;
	bra.uni 	$L__BB130_1;
$L__BB130_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs4;
$L__BB130_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_p016le_uv
.visible .entry Subsample_Bilinear_nv12_p016le_uv(
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_0,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_1,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_2,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_3,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_4,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_5,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_6,
	.param .u64 Subsample_Bilinear_nv12_p016le_uv_param_7,
	.param .u32 Subsample_Bilinear_nv12_p016le_uv_param_8,
	.param .u32 Subsample_Bilinear_nv12_p016le_uv_param_9,
	.param .u32 Subsample_Bilinear_nv12_p016le_uv_param_10,
	.param .u32 Subsample_Bilinear_nv12_p016le_uv_param_11,
	.param .u32 Subsample_Bilinear_nv12_p016le_uv_param_12,
	.param .f32 Subsample_Bilinear_nv12_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<47>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB131_2;
	bra.uni 	$L__BB131_1;
$L__BB131_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 255;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 255;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 255;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 255;
	add.s32 	%r46, %r42, %r45;
	cvt.u16.u32 	%rs1, %r44;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	cvt.u16.u32 	%rs4, %r46;
	add.s16 	%rs5, %rs4, 2;
	shr.u16 	%rs6, %rs5, 2;
	mul.lo.s16 	%rs7, %rs3, 257;
	mul.lo.s16 	%rs8, %rs6, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u16 	[%rd15], {%rs7, %rs8};
$L__BB131_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_p016le
.visible .entry Subsample_Bilinear_yuv444p_p016le(
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB132_2;
	bra.uni 	$L__BB132_1;
$L__BB132_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs4;
$L__BB132_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_p016le_uv
.visible .entry Subsample_Bilinear_yuv444p_p016le_uv(
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_p016le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_p016le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<63>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB133_2;
	bra.uni 	$L__BB133_1;
$L__BB133_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_p016le_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv444p_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	cvt.u16.u32 	%rs1, %r55;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r56, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r57, %r37, 255;
	add.s32 	%r58, %r56, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r59, %r41, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r61, %r45, 255;
	add.s32 	%r62, %r60, %r61;
	cvt.u16.u32 	%rs5, %r62;
	add.s16 	%rs6, %rs5, 2;
	shr.u16 	%rs7, %rs6, 2;
	mul.lo.s16 	%rs8, %rs7, 257;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 2;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 2;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u16 	[%rd20], {%rs4, %rs8};
$L__BB133_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_p016le
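	// Luma pass for p010le -> p016le: the rounded average of four taps is
	// widened from the MSB-aligned 10-bit range to 16 bits by OR-ing in a copy
	// of itself shifted right by 10, replicating the top bits into the low bits.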
.visible .entry Subsample_Bilinear_p010le_p016le(
	.param .u64 Subsample_Bilinear_p010le_p016le_param_0,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_1,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_2,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_3,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_4,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_5,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_6,
	.param .u64 Subsample_Bilinear_p010le_p016le_param_7,
	.param .u32 Subsample_Bilinear_p010le_p016le_param_8,
	.param .u32 Subsample_Bilinear_p010le_p016le_param_9,
	.param .u32 Subsample_Bilinear_p010le_p016le_param_10,
	.param .u32 Subsample_Bilinear_p010le_p016le_param_11,
	.param .u32 Subsample_Bilinear_p010le_p016le_param_12,
	.param .f32 Subsample_Bilinear_p010le_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB134_2;
	bra.uni 	$L__BB134_1;
$L__BB134_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.u16.u32 	%rs1, %r41;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs3;
$L__BB134_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_p016le_uv
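	// p010le -> p016le, interleaved UV plane: the .x (U) and .y (V)
	// components of four taps from the texture in param_1 are averaged
	// separately, both widened with v | (v >> 10), and stored as a
	// {U, V} ushort pair into the plane at param_5.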
.visible .entry Subsample_Bilinear_p010le_p016le_uv(
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_0,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_1,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_2,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_3,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_4,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_5,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_6,
	.param .u64 Subsample_Bilinear_p010le_p016le_uv_param_7,
	.param .u32 Subsample_Bilinear_p010le_p016le_uv_param_8,
	.param .u32 Subsample_Bilinear_p010le_p016le_uv_param_9,
	.param .u32 Subsample_Bilinear_p010le_p016le_uv_param_10,
	.param .u32 Subsample_Bilinear_p010le_p016le_uv_param_11,
	.param .u32 Subsample_Bilinear_p010le_p016le_uv_param_12,
	.param .f32 Subsample_Bilinear_p010le_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB135_2;
	bra.uni 	$L__BB135_1;
$L__BB135_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.u16.u32 	%rs1, %r49;
	cvt.u16.u32 	%rs2, %r50;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u16 	[%rd15], {%rs4, %rs6};
$L__BB135_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_p016le
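	// p016le -> p016le, luma plane: four 16-bit taps from param_0 are
	// averaged and stored unchanged (already full 16-bit depth) into
	// the plane at param_4.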
.visible .entry Subsample_Bilinear_p016le_p016le(
	.param .u64 Subsample_Bilinear_p016le_p016le_param_0,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_1,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_2,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_3,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_4,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_5,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_6,
	.param .u64 Subsample_Bilinear_p016le_p016le_param_7,
	.param .u32 Subsample_Bilinear_p016le_p016le_param_8,
	.param .u32 Subsample_Bilinear_p016le_p016le_param_9,
	.param .u32 Subsample_Bilinear_p016le_p016le_param_10,
	.param .u32 Subsample_Bilinear_p016le_p016le_param_11,
	.param .u32 Subsample_Bilinear_p016le_p016le_param_12,
	.param .f32 Subsample_Bilinear_p016le_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB136_2;
	bra.uni 	$L__BB136_1;
$L__BB136_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %r41;
$L__BB136_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_p016le_uv
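	// p016le -> p016le, interleaved UV plane: the two chroma components
	// of four taps from param_1 are averaged separately and stored as a
	// ushort pair, without depth conversion, into the plane at param_5.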
.visible .entry Subsample_Bilinear_p016le_p016le_uv(
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_0,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_1,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_2,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_3,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_4,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_5,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_6,
	.param .u64 Subsample_Bilinear_p016le_p016le_uv_param_7,
	.param .u32 Subsample_Bilinear_p016le_p016le_uv_param_8,
	.param .u32 Subsample_Bilinear_p016le_p016le_uv_param_9,
	.param .u32 Subsample_Bilinear_p016le_p016le_uv_param_10,
	.param .u32 Subsample_Bilinear_p016le_p016le_uv_param_11,
	.param .u32 Subsample_Bilinear_p016le_p016le_uv_param_12,
	.param .f32 Subsample_Bilinear_p016le_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB137_2;
	bra.uni 	$L__BB137_1;
$L__BB137_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.u16.u32 	%rs1, %r49;
	cvt.u16.u32 	%rs2, %r50;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v2.u16 	[%rd15], {%rs1, %rs2};
$L__BB137_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_p016le
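	// yuv444p16le -> p016le, luma plane: same 16-bit pass-through path
	// as the p016le luma kernel above - four taps from param_0, no
	// depth conversion, one ushort stored per pixel into param_4.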
.visible .entry Subsample_Bilinear_yuv444p16le_p016le(
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_p016le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB138_2;
	bra.uni 	$L__BB138_1;
$L__BB138_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p16le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %r41;
$L__BB138_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_p016le_uv
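	// yuv444p16le -> p016le, chroma: four taps are read from each of
	// the planar chroma textures in param_1 (U) and param_2 (V),
	// averaged independently, and interleaved into the semi-planar UV
	// plane at param_5.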
.visible .entry Subsample_Bilinear_yuv444p16le_p016le_uv(
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_p016le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_p016le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_p016le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<21>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB139_2;
	bra.uni 	$L__BB139_1;
$L__BB139_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_10];
	ld.param.u64 	%rd9, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 65535;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 65535;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 65535;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 65535;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	cvt.u16.u32 	%rs1, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd9, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 65535;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd9, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 65535;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd9, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 65535;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd9, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 65535;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	cvt.u16.u32 	%rs2, %r66;
	cvt.s64.s32 	%rd13, %r2;
	cvt.s64.s32 	%rd14, %r5;
	shr.u64 	%rd15, %rd14, 2;
	mul.lo.s64 	%rd16, %rd15, %rd13;
	cvt.s64.s32 	%rd17, %r1;
	add.s64 	%rd18, %rd16, %rd17;
	shl.b64 	%rd19, %rd18, 2;
	add.s64 	%rd20, %rd1, %rd19;
	st.global.v2.u16 	[%rd20], {%rs1, %rs2};
$L__BB139_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_yuv444p16le
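	// yuv420p -> yuv444p16le, luma plane: four 8-bit taps from param_0
	// are averaged and the byte expanded to 16 bits with v * 257 before
	// the ushort store into param_4.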
.visible .entry Subsample_Bilinear_yuv420p_yuv444p16le(
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB140_2;
	bra.uni 	$L__BB140_1;
$L__BB140_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv420p_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv420p_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs4;
$L__BB140_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv420p_yuv444p16le_uv
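	// yuv420p -> yuv444p16le, chroma: four 8-bit taps each from the
	// planar chroma textures in param_1 (U) and param_2 (V) are
	// averaged, expanded with v * 257, and written to the separate
	// 16-bit planes at param_5 and param_6.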
.visible .entry Subsample_Bilinear_yuv420p_yuv444p16le_uv(
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<63>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB141_2;
	bra.uni 	$L__BB141_1;
$L__BB141_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv420p_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	cvt.u16.u32 	%rs1, %r55;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd15, %r2;
	cvt.s64.s32 	%rd16, %r5;
	shr.u64 	%rd17, %rd16, 1;
	mul.lo.s64 	%rd18, %rd17, %rd15;
	cvt.s64.s32 	%rd19, %r1;
	add.s64 	%rd20, %rd18, %rd19;
	shl.b64 	%rd21, %rd20, 1;
	add.s64 	%rd22, %rd2, %rd21;
	st.global.u16 	[%rd22], %rs4;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r56, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r57, %r37, 255;
	add.s32 	%r58, %r56, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r59, %r41, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r61, %r45, 255;
	add.s32 	%r62, %r60, %r61;
	cvt.u16.u32 	%rs5, %r62;
	add.s16 	%rs6, %rs5, 2;
	shr.u16 	%rs7, %rs6, 2;
	mul.lo.s16 	%rs8, %rs7, 257;
	add.s64 	%rd23, %rd1, %rd21;
	st.global.u16 	[%rd23], %rs8;
$L__BB141_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_yuv444p16le
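	// nv12 -> yuv444p16le, luma plane: same 8-bit to 16-bit path as the
	// yuv420p luma kernel above (four taps from param_0, v * 257,
	// ushort store into param_4).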
.visible .entry Subsample_Bilinear_nv12_yuv444p16le(
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_0,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_1,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_2,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_3,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_4,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_5,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_6,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_param_7,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_param_8,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_param_9,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_param_10,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_param_11,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_param_12,
	.param .f32 Subsample_Bilinear_nv12_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB142_2;
	bra.uni 	$L__BB142_1;
$L__BB142_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_nv12_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs4;
$L__BB142_2:
	ret;

}
	// .globl	Subsample_Bilinear_nv12_yuv444p16le_uv
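	// nv12 -> yuv444p16le, chroma: the interleaved UV texture in
	// param_1 supplies U in .x and V in .y; each is averaged over four
	// taps, expanded with v * 257, and written to the separate planes
	// at param_5 and param_6.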
.visible .entry Subsample_Bilinear_nv12_yuv444p16le_uv(
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bilinear_nv12_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bilinear_nv12_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bilinear_nv12_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<47>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<19>;

	ld.param.u32 	%r4, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB143_2;
	bra.uni 	$L__BB143_1;
$L__BB143_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_nv12_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 255;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 255;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 255;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 255;
	add.s32 	%r46, %r42, %r45;
	cvt.u16.u32 	%rs1, %r44;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	cvt.u16.u32 	%rs4, %r46;
	add.s16 	%rs5, %rs4, 2;
	shr.u16 	%rs6, %rs5, 2;
	mul.lo.s16 	%rs7, %rs3, 257;
	cvt.s64.s32 	%rd10, %r2;
	cvt.s64.s32 	%rd11, %r5;
	shr.u64 	%rd12, %rd11, 1;
	mul.lo.s64 	%rd13, %rd12, %rd10;
	cvt.s64.s32 	%rd14, %r1;
	add.s64 	%rd15, %rd13, %rd14;
	shl.b64 	%rd16, %rd15, 1;
	add.s64 	%rd17, %rd2, %rd16;
	st.global.u16 	[%rd17], %rs7;
	mul.lo.s16 	%rs8, %rs6, 257;
	add.s64 	%rd18, %rd1, %rd16;
	st.global.u16 	[%rd18], %rs8;
$L__BB143_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_yuv444p16le
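	// yuv444p -> yuv444p16le, luma plane: four 8-bit taps from param_0,
	// expanded with v * 257, stored as ushort into param_4.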
.visible .entry Subsample_Bilinear_yuv444p_yuv444p16le(
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<40>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB144_2;
	bra.uni 	$L__BB144_1;
$L__BB144_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 255;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 255;
	add.s32 	%r39, %r37, %r38;
	cvt.u16.u32 	%rs1, %r39;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs4;
$L__BB144_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p_yuv444p16le_uv
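	// yuv444p -> yuv444p16le, chroma: the planar chroma textures in
	// param_1 (U) and param_2 (V) are averaged independently, expanded
	// with v * 257, and stored to the planes at param_5 and param_6.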
.visible .entry Subsample_Bilinear_yuv444p_yuv444p16le_uv(
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<63>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB145_2;
	bra.uni 	$L__BB145_1;
$L__BB145_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv444p_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 255;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 255;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 255;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 255;
	add.s32 	%r55, %r53, %r54;
	cvt.u16.u32 	%rs1, %r55;
	add.s16 	%rs2, %rs1, 2;
	shr.u16 	%rs3, %rs2, 2;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd15, %r2;
	cvt.s64.s32 	%rd16, %r5;
	shr.u64 	%rd17, %rd16, 1;
	mul.lo.s64 	%rd18, %rd17, %rd15;
	cvt.s64.s32 	%rd19, %r1;
	add.s64 	%rd20, %rd18, %rd19;
	shl.b64 	%rd21, %rd20, 1;
	add.s64 	%rd22, %rd2, %rd21;
	st.global.u16 	[%rd22], %rs4;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r56, %r33, 255;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r57, %r37, 255;
	add.s32 	%r58, %r56, %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r59, %r41, 255;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r61, %r45, 255;
	add.s32 	%r62, %r60, %r61;
	cvt.u16.u32 	%rs5, %r62;
	add.s16 	%rs6, %rs5, 2;
	shr.u16 	%rs7, %rs6, 2;
	mul.lo.s16 	%rs8, %rs7, 257;
	add.s64 	%rd23, %rd1, %rd21;
	st.global.u16 	[%rd23], %rs8;
$L__BB145_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_yuv444p16le
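	// p010le -> yuv444p16le, luma plane: four 16-bit taps from param_0,
	// widened with v | (v >> 10), stored as ushort into param_4.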
.visible .entry Subsample_Bilinear_p010le_yuv444p16le(
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_0,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_1,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_2,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_3,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_4,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_5,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_6,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_param_7,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_param_8,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_param_9,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_param_10,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_param_11,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_param_12,
	.param .f32 Subsample_Bilinear_p010le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB146_2;
	bra.uni 	$L__BB146_1;
$L__BB146_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p010le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.u16.u32 	%rs1, %r41;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %rs3;
$L__BB146_2:
	ret;

}
	// .globl	Subsample_Bilinear_p010le_yuv444p16le_uv
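	// p010le -> yuv444p16le, chroma: the interleaved UV texture in
	// param_1 supplies U (.x) and V (.y); each is averaged, widened
	// with v | (v >> 10), and written to the separate planes at
	// param_5 and param_6.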
.visible .entry Subsample_Bilinear_p010le_yuv444p16le_uv(
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bilinear_p010le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bilinear_p010le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bilinear_p010le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<19>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB147_2;
	bra.uni 	$L__BB147_1;
$L__BB147_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_p010le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.u16.u32 	%rs1, %r49;
	cvt.u16.u32 	%rs2, %r50;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	cvt.s64.s32 	%rd10, %r2;
	cvt.s64.s32 	%rd11, %r5;
	shr.u64 	%rd12, %rd11, 1;
	mul.lo.s64 	%rd13, %rd12, %rd10;
	cvt.s64.s32 	%rd14, %r1;
	add.s64 	%rd15, %rd13, %rd14;
	shl.b64 	%rd16, %rd15, 1;
	add.s64 	%rd17, %rd2, %rd16;
	st.global.u16 	[%rd17], %rs4;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	add.s64 	%rd18, %rd1, %rd16;
	st.global.u16 	[%rd18], %rs6;
$L__BB147_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_yuv444p16le
.visible .entry Subsample_Bilinear_p016le_yuv444p16le(
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_0,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_1,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_2,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_3,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_4,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_5,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_6,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_param_7,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_param_8,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_param_9,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_param_10,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_param_11,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_param_12,
	.param .f32 Subsample_Bilinear_p016le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB148_2;
	bra.uni 	$L__BB148_1;
$L__BB148_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_p016le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %r41;
$L__BB148_2:
	ret;

}
	// .globl	Subsample_Bilinear_p016le_yuv444p16le_uv
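// Same chroma averaging as the p010le kernel above, but the 16-bit p016le
// input needs no depth expansion: the rounded averages are stored to the two
// planes as-is.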
.visible .entry Subsample_Bilinear_p016le_yuv444p16le_uv(
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bilinear_p016le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bilinear_p016le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bilinear_p016le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<51>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<19>;

	ld.param.u32 	%r4, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB149_2;
	bra.uni 	$L__BB149_1;
$L__BB149_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bilinear_p016le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 65535;
	and.b32  	%r37, %r22, 65535;
	add.s32 	%r38, %r36, %r37;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f5, %f8}];
	// end inline asm
	and.b32  	%r39, %r25, 65535;
	add.s32 	%r40, %r35, %r39;
	and.b32  	%r41, %r26, 65535;
	add.s32 	%r42, %r38, %r41;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f7, %f8}];
	// end inline asm
	and.b32  	%r43, %r29, 65535;
	add.s32 	%r44, %r40, %r43;
	and.b32  	%r45, %r30, 65535;
	add.s32 	%r46, %r42, %r45;
	add.s32 	%r47, %r44, 2;
	add.s32 	%r48, %r46, 2;
	shr.u32 	%r49, %r47, 2;
	shr.u32 	%r50, %r48, 2;
	cvt.s64.s32 	%rd10, %r2;
	cvt.s64.s32 	%rd11, %r5;
	shr.u64 	%rd12, %rd11, 1;
	mul.lo.s64 	%rd13, %rd12, %rd10;
	cvt.s64.s32 	%rd14, %r1;
	add.s64 	%rd15, %rd13, %rd14;
	shl.b64 	%rd16, %rd15, 1;
	add.s64 	%rd17, %rd2, %rd16;
	st.global.u16 	[%rd17], %r49;
	add.s64 	%rd18, %rd1, %rd16;
	st.global.u16 	[%rd18], %r50;
$L__BB149_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_yuv444p16le
.visible .entry Subsample_Bilinear_yuv444p16le_yuv444p16le(
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<42>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB150_2;
	bra.uni 	$L__BB150_1;
$L__BB150_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_yuv444p16le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 65535;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 65535;
	add.s32 	%r35, %r33, %r34;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r36, %r25, 65535;
	add.s32 	%r37, %r35, %r36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r38, %r29, 65535;
	add.s32 	%r39, %r37, %r38;
	add.s32 	%r40, %r39, 2;
	shr.u32 	%r41, %r40, 2;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 1;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 1;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.u16 	[%rd15], %r41;
$L__BB150_2:
	ret;

}
	// .globl	Subsample_Bilinear_yuv444p16le_yuv444p16le_uv
.visible .entry Subsample_Bilinear_yuv444p16le_yuv444p16le_uv(
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b32 	%r<67>;
	.reg .f32 	%f<41>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB151_2;
	bra.uni 	$L__BB151_1;
$L__BB151_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd11, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bilinear_yuv444p16le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f17, %r6;
	cvt.rn.f32.s32 	%f18, %r3;
	div.rn.f32 	%f19, %f17, %f18;
	cvt.rn.f32.s32 	%f20, %r7;
	cvt.rn.f32.s32 	%f21, %r4;
	div.rn.f32 	%f22, %f20, %f21;
	add.f32 	%f23, %f19, 0fBF800000;
	mul.f32 	%f24, %f23, 0f3F000000;
	max.f32 	%f25, %f24, 0f00000000;
	min.f32 	%f26, %f25, 0f3F800000;
	add.f32 	%f27, %f22, 0fBF800000;
	mul.f32 	%f28, %f27, 0f3F000000;
	max.f32 	%f29, %f28, 0f00000000;
	min.f32 	%f30, %f29, 0f3F800000;
	cvt.rn.f32.s32 	%f31, %r2;
	add.f32 	%f32, %f31, 0f3F000000;
	cvt.rn.f32.s32 	%f33, %r1;
	add.f32 	%f34, %f33, 0f3F000000;
	add.f32 	%f35, %f26, 0f3F000000;
	div.rn.f32 	%f36, %f26, %f35;
	add.f32 	%f37, %f30, 0f3F000000;
	div.rn.f32 	%f38, %f30, %f37;
	neg.f32 	%f39, %f36;
	fma.rn.f32 	%f5, %f19, %f34, %f39;
	neg.f32 	%f40, %f38;
	fma.rn.f32 	%f4, %f22, %f32, %f40;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f5, %f4}];
	// end inline asm
	and.b32  	%r49, %r17, 65535;
	fma.rn.f32 	%f7, %f19, %f34, %f36;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f7, %f4}];
	// end inline asm
	and.b32  	%r50, %r21, 65535;
	add.s32 	%r51, %r49, %r50;
	fma.rn.f32 	%f8, %f22, %f32, %f38;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f5, %f8}];
	// end inline asm
	and.b32  	%r52, %r25, 65535;
	add.s32 	%r53, %r51, %r52;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f7, %f8}];
	// end inline asm
	and.b32  	%r54, %r29, 65535;
	add.s32 	%r55, %r53, %r54;
	add.s32 	%r56, %r55, 2;
	shr.u32 	%r57, %r56, 2;
	cvt.s64.s32 	%rd15, %r2;
	cvt.s64.s32 	%rd16, %r5;
	shr.u64 	%rd17, %rd16, 1;
	mul.lo.s64 	%rd18, %rd17, %rd15;
	cvt.s64.s32 	%rd19, %r1;
	add.s64 	%rd20, %rd18, %rd19;
	shl.b64 	%rd21, %rd20, 1;
	add.s64 	%rd22, %rd2, %rd21;
	st.global.u16 	[%rd22], %r57;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r33, %r34, %r35, %r36}, [%rd11, {%f5, %f4}];
	// end inline asm
	and.b32  	%r58, %r33, 65535;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r37, %r38, %r39, %r40}, [%rd11, {%f7, %f4}];
	// end inline asm
	and.b32  	%r59, %r37, 65535;
	add.s32 	%r60, %r58, %r59;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r41, %r42, %r43, %r44}, [%rd11, {%f5, %f8}];
	// end inline asm
	and.b32  	%r61, %r41, 65535;
	add.s32 	%r62, %r60, %r61;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r45, %r46, %r47, %r48}, [%rd11, {%f7, %f8}];
	// end inline asm
	and.b32  	%r63, %r45, 65535;
	add.s32 	%r64, %r62, %r63;
	add.s32 	%r65, %r64, 2;
	shr.u32 	%r66, %r65, 2;
	add.s64 	%rd23, %rd1, %rd21;
	st.global.u16 	[%rd23], %r66;
$L__BB151_2:
	ret;

}
	// .globl	Subsample_Bilinear_bgr0_bgr0
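// Bilinear scale of packed 4x8-bit pixels: all four byte channels are averaged
// over four texture samples ((sum + 2) >> 2) and written back with a single
// v4.u8 store at byte offset ((dst_pitch / 4) * y + x) * 4.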
.visible .entry Subsample_Bilinear_bgr0_bgr0(
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_0,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_1,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_2,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_3,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_4,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_5,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_6,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_param_7,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_param_8,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_param_9,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_param_10,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_param_11,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_param_12,
	.param .f32 Subsample_Bilinear_bgr0_bgr0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<69>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_bgr0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_bgr0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB152_2;
	bra.uni 	$L__BB152_1;
$L__BB152_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_bgr0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_bgr0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_bgr0_bgr0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_bgr0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_bgr0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	and.b32  	%r39, %r19, 255;
	and.b32  	%r40, %r23, 255;
	add.s32 	%r41, %r39, %r40;
	and.b32  	%r42, %r20, 255;
	and.b32  	%r43, %r24, 255;
	add.s32 	%r44, %r42, %r43;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r45, %r25, 255;
	add.s32 	%r46, %r35, %r45;
	and.b32  	%r47, %r26, 255;
	add.s32 	%r48, %r38, %r47;
	and.b32  	%r49, %r27, 255;
	add.s32 	%r50, %r41, %r49;
	and.b32  	%r51, %r28, 255;
	add.s32 	%r52, %r44, %r51;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r53, %r29, 255;
	add.s32 	%r54, %r46, %r53;
	and.b32  	%r55, %r30, 255;
	add.s32 	%r56, %r48, %r55;
	and.b32  	%r57, %r31, 255;
	add.s32 	%r58, %r50, %r57;
	and.b32  	%r59, %r32, 255;
	add.s32 	%r60, %r52, %r59;
	add.s32 	%r61, %r54, 2;
	add.s32 	%r62, %r56, 2;
	add.s32 	%r63, %r58, 2;
	add.s32 	%r64, %r60, 2;
	shr.u32 	%r65, %r61, 2;
	shr.u32 	%r66, %r62, 2;
	shr.u32 	%r67, %r63, 2;
	shr.u32 	%r68, %r64, 2;
	cvt.u16.u32 	%rs1, %r65;
	cvt.u16.u32 	%rs2, %r66;
	cvt.u16.u32 	%rs3, %r67;
	cvt.u16.u32 	%rs4, %r68;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v4.u8 	[%rd15], {%rs1, %rs2, %rs3, %rs4};
$L__BB152_2:
	ret;

}
	// .globl	Subsample_Bilinear_bgr0_bgr0_uv
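// The _uv variants of the packed-RGB kernels (this one and the ones below) are
// bare returns: packed formats such as bgr0/rgb0 have no second plane to fill.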
.visible .entry Subsample_Bilinear_bgr0_bgr0_uv(
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_0,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_1,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_2,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_3,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_4,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_5,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_6,
	.param .u64 Subsample_Bilinear_bgr0_bgr0_uv_param_7,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_uv_param_8,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_uv_param_9,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_uv_param_10,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_uv_param_11,
	.param .u32 Subsample_Bilinear_bgr0_bgr0_uv_param_12,
	.param .f32 Subsample_Bilinear_bgr0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bilinear_rgb0_rgb0
.visible .entry Subsample_Bilinear_rgb0_rgb0(
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_0,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_1,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_2,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_3,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_4,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_5,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_6,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_param_7,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_param_8,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_param_9,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_param_10,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_param_11,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_param_12,
	.param .f32 Subsample_Bilinear_rgb0_rgb0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<69>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_rgb0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_rgb0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB154_2;
	bra.uni 	$L__BB154_1;
$L__BB154_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_rgb0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_rgb0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_rgb0_rgb0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_rgb0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_rgb0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	and.b32  	%r39, %r19, 255;
	and.b32  	%r40, %r23, 255;
	add.s32 	%r41, %r39, %r40;
	and.b32  	%r42, %r20, 255;
	and.b32  	%r43, %r24, 255;
	add.s32 	%r44, %r42, %r43;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r45, %r25, 255;
	add.s32 	%r46, %r35, %r45;
	and.b32  	%r47, %r26, 255;
	add.s32 	%r48, %r38, %r47;
	and.b32  	%r49, %r27, 255;
	add.s32 	%r50, %r41, %r49;
	and.b32  	%r51, %r28, 255;
	add.s32 	%r52, %r44, %r51;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r53, %r29, 255;
	add.s32 	%r54, %r46, %r53;
	and.b32  	%r55, %r30, 255;
	add.s32 	%r56, %r48, %r55;
	and.b32  	%r57, %r31, 255;
	add.s32 	%r58, %r50, %r57;
	and.b32  	%r59, %r32, 255;
	add.s32 	%r60, %r52, %r59;
	add.s32 	%r61, %r54, 2;
	add.s32 	%r62, %r56, 2;
	add.s32 	%r63, %r58, 2;
	add.s32 	%r64, %r60, 2;
	shr.u32 	%r65, %r61, 2;
	shr.u32 	%r66, %r62, 2;
	shr.u32 	%r67, %r63, 2;
	shr.u32 	%r68, %r64, 2;
	cvt.u16.u32 	%rs1, %r65;
	cvt.u16.u32 	%rs2, %r66;
	cvt.u16.u32 	%rs3, %r67;
	cvt.u16.u32 	%rs4, %r68;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v4.u8 	[%rd15], {%rs1, %rs2, %rs3, %rs4};
$L__BB154_2:
	ret;

}
	// .globl	Subsample_Bilinear_rgb0_rgb0_uv
.visible .entry Subsample_Bilinear_rgb0_rgb0_uv(
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_0,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_1,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_2,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_3,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_4,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_5,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_6,
	.param .u64 Subsample_Bilinear_rgb0_rgb0_uv_param_7,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_uv_param_8,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_uv_param_9,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_uv_param_10,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_uv_param_11,
	.param .u32 Subsample_Bilinear_rgb0_rgb0_uv_param_12,
	.param .f32 Subsample_Bilinear_rgb0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bilinear_bgr0_rgb0
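// Identical averaging to the bgr0 -> bgr0 kernel; only the final store differs,
// writing {%rs3, %rs2, %rs1, %rs4} so the first and third channels are swapped
// to convert bgr0 into rgb0.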
.visible .entry Subsample_Bilinear_bgr0_rgb0(
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_0,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_1,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_2,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_3,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_4,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_5,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_6,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_param_7,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_param_8,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_param_9,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_param_10,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_param_11,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_param_12,
	.param .f32 Subsample_Bilinear_bgr0_rgb0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<69>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_bgr0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_bgr0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB156_2;
	bra.uni 	$L__BB156_1;
$L__BB156_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_bgr0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_bgr0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_bgr0_rgb0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_bgr0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_bgr0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	and.b32  	%r39, %r19, 255;
	and.b32  	%r40, %r23, 255;
	add.s32 	%r41, %r39, %r40;
	and.b32  	%r42, %r20, 255;
	and.b32  	%r43, %r24, 255;
	add.s32 	%r44, %r42, %r43;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r45, %r25, 255;
	add.s32 	%r46, %r35, %r45;
	and.b32  	%r47, %r26, 255;
	add.s32 	%r48, %r38, %r47;
	and.b32  	%r49, %r27, 255;
	add.s32 	%r50, %r41, %r49;
	and.b32  	%r51, %r28, 255;
	add.s32 	%r52, %r44, %r51;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r53, %r29, 255;
	add.s32 	%r54, %r46, %r53;
	and.b32  	%r55, %r30, 255;
	add.s32 	%r56, %r48, %r55;
	and.b32  	%r57, %r31, 255;
	add.s32 	%r58, %r50, %r57;
	and.b32  	%r59, %r32, 255;
	add.s32 	%r60, %r52, %r59;
	add.s32 	%r61, %r54, 2;
	add.s32 	%r62, %r56, 2;
	add.s32 	%r63, %r58, 2;
	add.s32 	%r64, %r60, 2;
	shr.u32 	%r65, %r61, 2;
	shr.u32 	%r66, %r62, 2;
	shr.u32 	%r67, %r63, 2;
	shr.u32 	%r68, %r64, 2;
	cvt.u16.u32 	%rs1, %r65;
	cvt.u16.u32 	%rs2, %r66;
	cvt.u16.u32 	%rs3, %r67;
	cvt.u16.u32 	%rs4, %r68;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v4.u8 	[%rd15], {%rs3, %rs2, %rs1, %rs4};
$L__BB156_2:
	ret;

}
	// .globl	Subsample_Bilinear_bgr0_rgb0_uv
.visible .entry Subsample_Bilinear_bgr0_rgb0_uv(
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_0,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_1,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_2,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_3,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_4,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_5,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_6,
	.param .u64 Subsample_Bilinear_bgr0_rgb0_uv_param_7,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_uv_param_8,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_uv_param_9,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_uv_param_10,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_uv_param_11,
	.param .u32 Subsample_Bilinear_bgr0_rgb0_uv_param_12,
	.param .f32 Subsample_Bilinear_bgr0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bilinear_rgb0_bgr0
.visible .entry Subsample_Bilinear_rgb0_bgr0(
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_0,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_1,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_2,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_3,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_4,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_5,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_6,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_param_7,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_param_8,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_param_9,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_param_10,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_param_11,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_param_12,
	.param .f32 Subsample_Bilinear_rgb0_bgr0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<69>;
	.reg .f32 	%f<33>;
	.reg .b64 	%rd<16>;

	ld.param.u32 	%r4, [Subsample_Bilinear_rgb0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Bilinear_rgb0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB158_2;
	bra.uni 	$L__BB158_1;
$L__BB158_1:
	ld.param.u32 	%r7, [Subsample_Bilinear_rgb0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Bilinear_rgb0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Bilinear_rgb0_bgr0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bilinear_rgb0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bilinear_rgb0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f9, %r6;
	cvt.rn.f32.s32 	%f10, %r3;
	div.rn.f32 	%f11, %f9, %f10;
	cvt.rn.f32.s32 	%f12, %r7;
	cvt.rn.f32.s32 	%f13, %r4;
	div.rn.f32 	%f14, %f12, %f13;
	add.f32 	%f15, %f11, 0fBF800000;
	mul.f32 	%f16, %f15, 0f3F000000;
	max.f32 	%f17, %f16, 0f00000000;
	min.f32 	%f18, %f17, 0f3F800000;
	add.f32 	%f19, %f14, 0fBF800000;
	mul.f32 	%f20, %f19, 0f3F000000;
	max.f32 	%f21, %f20, 0f00000000;
	min.f32 	%f22, %f21, 0f3F800000;
	cvt.rn.f32.s32 	%f23, %r2;
	add.f32 	%f24, %f23, 0f3F000000;
	cvt.rn.f32.s32 	%f25, %r1;
	add.f32 	%f26, %f25, 0f3F000000;
	add.f32 	%f27, %f18, 0f3F000000;
	div.rn.f32 	%f28, %f18, %f27;
	add.f32 	%f29, %f22, 0f3F000000;
	div.rn.f32 	%f30, %f22, %f29;
	neg.f32 	%f31, %f28;
	fma.rn.f32 	%f5, %f11, %f26, %f31;
	neg.f32 	%f32, %f30;
	fma.rn.f32 	%f4, %f14, %f24, %f32;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f5, %f4}];
	// end inline asm
	and.b32  	%r33, %r17, 255;
	fma.rn.f32 	%f7, %f11, %f26, %f28;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f7, %f4}];
	// end inline asm
	and.b32  	%r34, %r21, 255;
	add.s32 	%r35, %r33, %r34;
	and.b32  	%r36, %r18, 255;
	and.b32  	%r37, %r22, 255;
	add.s32 	%r38, %r36, %r37;
	and.b32  	%r39, %r19, 255;
	and.b32  	%r40, %r23, 255;
	add.s32 	%r41, %r39, %r40;
	and.b32  	%r42, %r20, 255;
	and.b32  	%r43, %r24, 255;
	add.s32 	%r44, %r42, %r43;
	fma.rn.f32 	%f8, %f14, %f24, %f30;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f5, %f8}];
	// end inline asm
	and.b32  	%r45, %r25, 255;
	add.s32 	%r46, %r35, %r45;
	and.b32  	%r47, %r26, 255;
	add.s32 	%r48, %r38, %r47;
	and.b32  	%r49, %r27, 255;
	add.s32 	%r50, %r41, %r49;
	and.b32  	%r51, %r28, 255;
	add.s32 	%r52, %r44, %r51;
	// begin inline asm
	tex.2d.v4.u32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f7, %f8}];
	// end inline asm
	and.b32  	%r53, %r29, 255;
	add.s32 	%r54, %r46, %r53;
	and.b32  	%r55, %r30, 255;
	add.s32 	%r56, %r48, %r55;
	and.b32  	%r57, %r31, 255;
	add.s32 	%r58, %r50, %r57;
	and.b32  	%r59, %r32, 255;
	add.s32 	%r60, %r52, %r59;
	add.s32 	%r61, %r54, 2;
	add.s32 	%r62, %r56, 2;
	add.s32 	%r63, %r58, 2;
	add.s32 	%r64, %r60, 2;
	shr.u32 	%r65, %r61, 2;
	shr.u32 	%r66, %r62, 2;
	shr.u32 	%r67, %r63, 2;
	shr.u32 	%r68, %r64, 2;
	cvt.u16.u32 	%rs1, %r65;
	cvt.u16.u32 	%rs2, %r66;
	cvt.u16.u32 	%rs3, %r67;
	cvt.u16.u32 	%rs4, %r68;
	cvt.s64.s32 	%rd8, %r2;
	cvt.s64.s32 	%rd9, %r5;
	shr.u64 	%rd10, %rd9, 2;
	mul.lo.s64 	%rd11, %rd10, %rd8;
	cvt.s64.s32 	%rd12, %r1;
	add.s64 	%rd13, %rd11, %rd12;
	shl.b64 	%rd14, %rd13, 2;
	add.s64 	%rd15, %rd1, %rd14;
	st.global.v4.u8 	[%rd15], {%rs3, %rs2, %rs1, %rs4};
$L__BB158_2:
	ret;

}
	// .globl	Subsample_Bilinear_rgb0_bgr0_uv
.visible .entry Subsample_Bilinear_rgb0_bgr0_uv(
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_0,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_1,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_2,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_3,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_4,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_5,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_6,
	.param .u64 Subsample_Bilinear_rgb0_bgr0_uv_param_7,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_uv_param_8,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_uv_param_9,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_uv_param_10,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_uv_param_11,
	.param .u32 Subsample_Bilinear_rgb0_bgr0_uv_param_12,
	.param .f32 Subsample_Bilinear_rgb0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_yuv420p
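// Bicubic scale: the output pixel maps to source coordinate
// scale * (i + 0.5) - 0.5, split into floor and fraction; four cubic weights
// are built per axis (param_13 == 0f497423F0, i.e. 999999.0f, selects
// coefficient 0, otherwise -param_13 is used), a 4x4 neighborhood is gathered
// with 16 floating-point texture reads, and the weighted sum is scaled by
// 255.0 and stored as a single byte.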
.visible .entry Subsample_Bicubic_yuv420p_yuv420p(
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_yuv420p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB160_2;
	bra.uni 	$L__BB160_1;
$L__BB160_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_yuv420p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv420p_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB160_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_yuv420p_uv
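// Chroma variant of the bicubic kernel above: the same 4x4 weights are applied
// to two source textures (param_1 and param_2) and the two results are written
// to the separate U and V destination planes (param_5 and param_6).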
.visible .entry Subsample_Bicubic_yuv420p_yuv420p_uv(
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_yuv420p_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_yuv420p_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB161_2;
	bra.uni 	$L__BB161_1;
$L__BB161_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv420p_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.wide.s32 	%rd39, %r2, %r5;
	cvt.s64.s32 	%rd40, %r1;
	add.s64 	%rd41, %rd39, %rd40;
	add.s64 	%rd42, %rd2, %rd41;
	st.global.u8 	[%rd42], %rs1;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f190;
	add.s64 	%rd43, %rd1, %rd41;
	st.global.u8 	[%rd43], %rs2;
$L__BB161_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_yuv420p
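	// The entry points below all follow the same pattern: each thread
	// computes its destination pixel (x, y), maps it back into the source
	// texture with a half-pixel offset ((x + 0.5) * scale - 0.5), builds
	// 4-tap bicubic weights in x and y, gathers a 4x4 neighbourhood with
	// tex.2d, accumulates the separable weighted sum with fma, scales the
	// result to the output bit depth and stores one sample per plane.
	// The names and structure suggest bicubic scaling kernels of the kind
	// used by CUDA-based scalers (e.g. FFmpeg's scale_cuda); that origin
	// is an assumption, not something stated in this cache file.
	//
	// A rough CUDA-level sketch of the per-tap weight implied by the fma
	// sequence below (same Horner form); keys_weight and A are
	// illustrative names, not taken from any original source:
	//
	//   __device__ static inline float keys_weight(float d, float A)
	//   {
	//       d = fabsf(d);
	//       if (d < 1.0f)   // the two inner taps
	//           return ((A + 2.0f) * d - (A + 3.0f)) * d * d + 1.0f;
	//       if (d < 2.0f)   // the two outer taps
	//           return ((A * d - 5.0f * A) * d + 8.0f * A) * d - 4.0f * A;
	//       return 0.0f;
	//   }
	//
	// Only three of the four weights per axis are evaluated this way; the
	// fourth is taken as 1 minus the sum of the other three. param_13 is
	// compared against a sentinel of 999999.0; when it matches, A is
	// forced to 0, otherwise A = -param_13.
	//
	// This kernel: sample the single-channel luma texture (param_0),
	// scale the filtered value by 255.0 and store one byte into the
	// 8-bit destination plane (param_4) at y * pitch + x.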
.visible .entry Subsample_Bicubic_nv12_yuv420p(
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_0,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_1,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_2,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_3,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_4,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_5,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_6,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_param_7,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_param_8,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_param_9,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_param_10,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_param_11,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_param_12,
	.param .f32 Subsample_Bicubic_nv12_yuv420p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB162_2;
	bra.uni 	$L__BB162_1;
$L__BB162_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_yuv420p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB162_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_yuv420p_uv
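	// Chroma variant of the kernel above: the interleaved UV texture
	// (param_1) is sampled as a two-component value, both components run
	// through the same bicubic accumulation, and the two 8-bit results
	// are stored into separate destination planes (params 5 and 6).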
.visible .entry Subsample_Bicubic_nv12_yuv420p_uv(
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_0,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_1,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_2,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_3,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_4,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_5,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_6,
	.param .u64 Subsample_Bicubic_nv12_yuv420p_uv_param_7,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_uv_param_8,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_uv_param_9,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_uv_param_10,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_uv_param_11,
	.param .u32 Subsample_Bicubic_nv12_yuv420p_uv_param_12,
	.param .f32 Subsample_Bicubic_nv12_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB163_2;
	bra.uni 	$L__BB163_1;
$L__BB163_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_yuv420p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_nv12_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_nv12_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f437F0000;
	mul.f32 	%f158, %f156, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs1;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs2;
$L__BB163_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p_yuv420p
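	// Luma path for planar 4:4:4 input; the body is identical to the
	// nv12 luma kernel above (single-channel texture in, one byte out,
	// scaled by 255.0).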
.visible .entry Subsample_Bicubic_yuv444p_yuv420p(
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_yuv420p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB164_2;
	bra.uni 	$L__BB164_1;
$L__BB164_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_yuv420p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB164_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p_yuv420p_uv
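	// Chroma for planar 4:4:4 input: U and V come from two separate
	// single-channel textures (params 1 and 2), so the 4x4 gather and
	// weighted sum run twice, once per texture, each result going to its
	// own 8-bit destination plane.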
.visible .entry Subsample_Bicubic_yuv444p_yuv420p_uv(
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_yuv420p_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_yuv420p_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB165_2;
	bra.uni 	$L__BB165_1;
$L__BB165_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv444p_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.wide.s32 	%rd39, %r2, %r5;
	cvt.s64.s32 	%rd40, %r1;
	add.s64 	%rd41, %rd39, %rd40;
	add.s64 	%rd42, %rd2, %rd41;
	st.global.u8 	[%rd42], %rs1;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f190;
	add.s64 	%rd43, %rd1, %rd41;
	st.global.u8 	[%rd43], %rs2;
$L__BB165_2:
	ret;

}
	// .globl	Subsample_Bicubic_p010le_yuv420p
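	// High-bit-depth (p010le) luma: the filtered value is scaled by
	// 65535.0, truncated to u16 and shifted right by 8, so only the high
	// byte is written to the 8-bit destination plane.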
.visible .entry Subsample_Bicubic_p010le_yuv420p(
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_0,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_1,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_2,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_3,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_4,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_5,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_6,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_param_7,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_param_8,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_param_9,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_param_10,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_param_11,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_param_12,
	.param .f32 Subsample_Bicubic_p010le_yuv420p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB166_2;
	bra.uni 	$L__BB166_1;
$L__BB166_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_yuv420p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB166_2:
	ret;

}
	// .globl	Subsample_Bicubic_p010le_yuv420p_uv
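	// Chroma counterpart: two-component UV texture, both channels scaled
	// by 65535.0 and reduced to their high byte before the two
	// single-byte stores into the chroma planes.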
.visible .entry Subsample_Bicubic_p010le_yuv420p_uv(
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_0,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_1,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_2,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_3,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_4,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_5,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_6,
	.param .u64 Subsample_Bicubic_p010le_yuv420p_uv_param_7,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_uv_param_8,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_uv_param_9,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_uv_param_10,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_uv_param_11,
	.param .u32 Subsample_Bicubic_p010le_yuv420p_uv_param_12,
	.param .f32 Subsample_Bicubic_p010le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB167_2;
	bra.uni 	$L__BB167_1;
$L__BB167_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_yuv420p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_p010le_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_p010le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB167_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_yuv420p
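	// p016le luma: same body as the p010le luma kernel above, since both
	// treat the source as a normalized 16-bit texture and keep the high
	// byte of the 0..65535 result.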
.visible .entry Subsample_Bicubic_p016le_yuv420p(
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_0,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_1,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_2,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_3,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_4,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_5,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_6,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_param_7,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_param_8,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_param_9,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_param_10,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_param_11,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_param_12,
	.param .f32 Subsample_Bicubic_p016le_yuv420p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB168_2;
	bra.uni 	$L__BB168_1;
$L__BB168_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_yuv420p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB168_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_yuv420p_uv
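	// Chroma (UV) bicubic subsampling, 16-bit p016le source to 8-bit yuv420p planes.
	// Each thread maps to one destination pixel (bounds-checked against params 8/9),
	// derives its source coordinate from the param_11/param_8 and param_12/param_9
	// ratios, and evaluates a 4x4-tap bicubic filter; param_13 supplies the filter
	// coefficient (zeroed when it equals 999999.0, otherwise negated).
	// The interleaved UV texture (param_1) yields two channels per fetch; each result
	// is scaled by 65535, truncated, and its high byte stored to the two planar
	// outputs (param_5, param_6) at offset y * param_10 + x.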
.visible .entry Subsample_Bicubic_p016le_yuv420p_uv(
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_0,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_1,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_2,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_3,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_4,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_5,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_6,
	.param .u64 Subsample_Bicubic_p016le_yuv420p_uv_param_7,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_uv_param_8,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_uv_param_9,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_uv_param_10,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_uv_param_11,
	.param .u32 Subsample_Bicubic_p016le_yuv420p_uv_param_12,
	.param .f32 Subsample_Bicubic_p016le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB169_2;
	bra.uni 	$L__BB169_1;
$L__BB169_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_yuv420p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_p016le_yuv420p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_p016le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB169_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_yuv420p
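	// Single-plane bicubic subsampling, 16-bit yuv444p16le source to 8-bit yuv420p.
	// Same 4x4 bicubic evaluation as above on texture param_0; the result is scaled
	// by 65535, truncated, and the high byte is stored to the plane at param_4.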
.visible .entry Subsample_Bicubic_yuv444p16le_yuv420p(
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_yuv420p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB170_2;
	bra.uni 	$L__BB170_1;
$L__BB170_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_yuv420p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_yuv420p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_yuv420p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p16le_yuv420p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB170_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_yuv420p_uv
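	// Chroma bicubic subsampling from two planar 16-bit textures (param_1, param_2).
	// Each chroma plane is filtered independently with the shared 4x4 weights and
	// written as 8 bits (scale by 65535, keep the high byte) to param_5 and param_6.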
.visible .entry Subsample_Bicubic_yuv444p16le_yuv420p_uv(
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB171_2;
	bra.uni 	$L__BB171_1;
$L__BB171_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv444p16le_yuv420p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f153;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd39, %r2, %r5;
	cvt.s64.s32 	%rd40, %r1;
	add.s64 	%rd41, %rd39, %rd40;
	add.s64 	%rd42, %rd2, %rd41;
	st.global.u8 	[%rd42], %rs2;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f190;
	shr.u16 	%rs4, %rs3, 8;
	add.s64 	%rd43, %rd1, %rd41;
	st.global.u8 	[%rd43], %rs4;
$L__BB171_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_nv12
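	// Single-plane bicubic subsampling, 8-bit yuv420p source to nv12.
	// Identical control flow to the 16-bit variants, but the filtered value is
	// scaled by 255 and stored directly as one byte to the plane at param_4.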
.visible .entry Subsample_Bicubic_yuv420p_nv12(
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_nv12_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB172_2;
	bra.uni 	$L__BB172_1;
$L__BB172_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_nv12_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv420p_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB172_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_nv12_uv
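	// Chroma bicubic subsampling from two planar 8-bit textures (param_1, param_2)
	// into the interleaved nv12 chroma plane (param_5): both filtered values are
	// scaled by 255 and stored as a 2-byte vector at ((param_10 >> 1) * y + x) * 2.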
.visible .entry Subsample_Bicubic_yuv420p_nv12_uv(
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_nv12_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_nv12_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_nv12_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB173_2;
	bra.uni 	$L__BB173_1;
$L__BB173_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_nv12_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_nv12_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv420p_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv420p_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f190;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 1;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 1;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u8 	[%rd44], {%rs1, %rs2};
$L__BB173_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_nv12
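	// Single-plane bicubic subsampling, 8-bit nv12 source to nv12: texture param_0
	// in, one byte per pixel (scaled by 255) out to the plane at param_4.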
.visible .entry Subsample_Bicubic_nv12_nv12(
	.param .u64 Subsample_Bicubic_nv12_nv12_param_0,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_1,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_2,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_3,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_4,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_5,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_6,
	.param .u64 Subsample_Bicubic_nv12_nv12_param_7,
	.param .u32 Subsample_Bicubic_nv12_nv12_param_8,
	.param .u32 Subsample_Bicubic_nv12_nv12_param_9,
	.param .u32 Subsample_Bicubic_nv12_nv12_param_10,
	.param .u32 Subsample_Bicubic_nv12_nv12_param_11,
	.param .u32 Subsample_Bicubic_nv12_nv12_param_12,
	.param .f32 Subsample_Bicubic_nv12_nv12_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB174_2;
	bra.uni 	$L__BB174_1;
$L__BB174_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_nv12_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB174_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_nv12_uv
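	// Chroma bicubic subsampling for interleaved nv12 chroma: one two-channel
	// texture (param_1) in, both channels filtered, scaled by 255, and stored as a
	// 2-byte vector to param_5 at offset ((param_10 >> 1) * y + x) * 2.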
.visible .entry Subsample_Bicubic_nv12_nv12_uv(
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_0,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_1,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_2,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_3,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_4,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_5,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_6,
	.param .u64 Subsample_Bicubic_nv12_nv12_uv_param_7,
	.param .u32 Subsample_Bicubic_nv12_nv12_uv_param_8,
	.param .u32 Subsample_Bicubic_nv12_nv12_uv_param_9,
	.param .u32 Subsample_Bicubic_nv12_nv12_uv_param_10,
	.param .u32 Subsample_Bicubic_nv12_nv12_uv_param_11,
	.param .u32 Subsample_Bicubic_nv12_nv12_uv_param_12,
	.param .f32 Subsample_Bicubic_nv12_nv12_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB175_2;
	bra.uni 	$L__BB175_1;
$L__BB175_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_nv12_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
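	// NOTE: scale both filtered chroma values from the normalized [0,1] range
	// to 8 bits (multiply by 255.0), truncate, and store the interleaved byte
	// pair; the byte offset works out to pitch*y + 2*x for the UV plane
	// (assuming the pitch in param_10 is even).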
	mul.f32 	%f157, %f155, 0f437F0000;
	mul.f32 	%f158, %f156, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u8 	[%rd27], {%rs1, %rs2};
$L__BB175_2:
	ret;

}
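//
// Subsample_Bicubic_yuv444p_nv12 (descriptive summary): luma pass of a
// yuv444p -> nv12 bicubic rescale. Samples the 8-bit Y plane texture
// (param_0), applies the same 4x4 bicubic filter as the kernels above,
// scales the result to [0,255] and stores one byte per destination pixel
// at dst + pitch*y + x.
//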
	// .globl	Subsample_Bicubic_yuv444p_nv12
.visible .entry Subsample_Bicubic_yuv444p_nv12(
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_nv12_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB176_2;
	bra.uni 	$L__BB176_1;
$L__BB176_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_nv12_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB176_2:
	ret;

}
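//
// Subsample_Bicubic_yuv444p_nv12_uv (descriptive summary): chroma pass of a
// yuv444p -> nv12 rescale. Filters the two planar chroma textures (param_1
// and param_2) separately with the same bicubic weights, scales both to
// [0,255] and stores them as an interleaved byte pair in the nv12 chroma
// plane.
//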
	// .globl	Subsample_Bicubic_yuv444p_nv12_uv
.visible .entry Subsample_Bicubic_yuv444p_nv12_uv(
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_nv12_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_nv12_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_nv12_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB177_2;
	bra.uni 	$L__BB177_1;
$L__BB177_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_nv12_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_nv12_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv444p_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f190;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 1;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 1;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u8 	[%rd44], {%rs1, %rs2};
$L__BB177_2:
	ret;

}
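//
// Subsample_Bicubic_p010le_nv12 (descriptive summary): luma pass of a
// p010le -> nv12 rescale. Same filtering as above, but the source uses a
// 16-bit container, so the result is scaled to [0,65535], truncated, and
// only the high byte is stored to produce the 8-bit nv12 luma sample.
//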
	// .globl	Subsample_Bicubic_p010le_nv12
.visible .entry Subsample_Bicubic_p010le_nv12(
	.param .u64 Subsample_Bicubic_p010le_nv12_param_0,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_1,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_2,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_3,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_4,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_5,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_6,
	.param .u64 Subsample_Bicubic_p010le_nv12_param_7,
	.param .u32 Subsample_Bicubic_p010le_nv12_param_8,
	.param .u32 Subsample_Bicubic_p010le_nv12_param_9,
	.param .u32 Subsample_Bicubic_p010le_nv12_param_10,
	.param .u32 Subsample_Bicubic_p010le_nv12_param_11,
	.param .u32 Subsample_Bicubic_p010le_nv12_param_12,
	.param .f32 Subsample_Bicubic_p010le_nv12_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB178_2;
	bra.uni 	$L__BB178_1;
$L__BB178_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_nv12_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB178_2:
	ret;

}
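//
// Subsample_Bicubic_p010le_nv12_uv (descriptive summary): chroma pass of a
// p010le -> nv12 rescale. Samples the source's interleaved 16-bit UV plane
// (param_1, two channels per fetch), filters both channels, scales to
// [0,65535] and keeps the high byte of each before storing the interleaved
// 8-bit pair.
//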
	// .globl	Subsample_Bicubic_p010le_nv12_uv
.visible .entry Subsample_Bicubic_p010le_nv12_uv(
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_0,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_1,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_2,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_3,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_4,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_5,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_6,
	.param .u64 Subsample_Bicubic_p010le_nv12_uv_param_7,
	.param .u32 Subsample_Bicubic_p010le_nv12_uv_param_8,
	.param .u32 Subsample_Bicubic_p010le_nv12_uv_param_9,
	.param .u32 Subsample_Bicubic_p010le_nv12_uv_param_10,
	.param .u32 Subsample_Bicubic_p010le_nv12_uv_param_11,
	.param .u32 Subsample_Bicubic_p010le_nv12_uv_param_12,
	.param .f32 Subsample_Bicubic_p010le_nv12_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB179_2;
	bra.uni 	$L__BB179_1;
$L__BB179_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_nv12_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 8;
	shr.u16 	%rs4, %rs2, 8;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u8 	[%rd27], {%rs3, %rs4};
$L__BB179_2:
	ret;

}
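//
// Subsample_Bicubic_p016le_nv12 (descriptive summary): luma pass of a
// p016le -> nv12 rescale; the instruction stream matches the p010le luma
// kernel above, only the entry name (and thus the bound source format)
// differs.
//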
	// .globl	Subsample_Bicubic_p016le_nv12
.visible .entry Subsample_Bicubic_p016le_nv12(
	.param .u64 Subsample_Bicubic_p016le_nv12_param_0,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_1,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_2,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_3,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_4,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_5,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_6,
	.param .u64 Subsample_Bicubic_p016le_nv12_param_7,
	.param .u32 Subsample_Bicubic_p016le_nv12_param_8,
	.param .u32 Subsample_Bicubic_p016le_nv12_param_9,
	.param .u32 Subsample_Bicubic_p016le_nv12_param_10,
	.param .u32 Subsample_Bicubic_p016le_nv12_param_11,
	.param .u32 Subsample_Bicubic_p016le_nv12_param_12,
	.param .f32 Subsample_Bicubic_p016le_nv12_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB180_2;
	bra.uni 	$L__BB180_1;
$L__BB180_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_nv12_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB180_2:
	ret;

}
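//
// Subsample_Bicubic_p016le_nv12_uv (descriptive summary): chroma pass of a
// p016le -> nv12 rescale; same body as the p010le chroma kernel above.
//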
	// .globl	Subsample_Bicubic_p016le_nv12_uv
.visible .entry Subsample_Bicubic_p016le_nv12_uv(
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_0,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_1,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_2,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_3,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_4,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_5,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_6,
	.param .u64 Subsample_Bicubic_p016le_nv12_uv_param_7,
	.param .u32 Subsample_Bicubic_p016le_nv12_uv_param_8,
	.param .u32 Subsample_Bicubic_p016le_nv12_uv_param_9,
	.param .u32 Subsample_Bicubic_p016le_nv12_uv_param_10,
	.param .u32 Subsample_Bicubic_p016le_nv12_uv_param_11,
	.param .u32 Subsample_Bicubic_p016le_nv12_uv_param_12,
	.param .f32 Subsample_Bicubic_p016le_nv12_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB181_2;
	bra.uni 	$L__BB181_1;
$L__BB181_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_nv12_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_nv12_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 8;
	shr.u16 	%rs4, %rs2, 8;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u8 	[%rd27], {%rs3, %rs4};
$L__BB181_2:
	ret;

}
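//
// Subsample_Bicubic_yuv444p16le_nv12 (descriptive summary): luma pass of a
// yuv444p16le -> nv12 rescale. Planar 16-bit Y source (param_0); the
// filtered value is scaled to [0,65535] and its high byte stored as the
// 8-bit output sample.
//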
	// .globl	Subsample_Bicubic_yuv444p16le_nv12
.visible .entry Subsample_Bicubic_yuv444p16le_nv12(
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_nv12_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB182_2;
	bra.uni 	$L__BB182_1;
$L__BB182_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_nv12_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_nv12_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_nv12_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p16le_nv12_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB182_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_nv12_uv
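	// Note (inferred from the entry name and the code below, not asserted from any
	// other source): this kernel appears to resample the U and V planes of a
	// yuv444p16le input into the interleaved chroma plane of an nv12 output.
	// Each thread bounds-checks its (x, y) destination pixel, maps it back into
	// the source, gathers a 4x4 texel window from each of the two chroma
	// textures (param_1, param_2), applies cubic weights derived from param_13,
	// rescales to 16 bits (0f477FFF00 ~ 65535.0), keeps the high byte
	// (shr.u16 ..., 8), and stores the {U, V} byte pair with st.global.v2.u8.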
.visible .entry Subsample_Bicubic_yuv444p16le_nv12_uv(
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_nv12_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_nv12_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_nv12_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB183_2;
	bra.uni 	$L__BB183_1;
$L__BB183_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_nv12_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f153;
	shr.u16 	%rs2, %rs1, 8;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f190;
	shr.u16 	%rs4, %rs3, 8;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 1;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 1;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u8 	[%rd44], {%rs2, %rs4};
$L__BB183_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_yuv444p
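	// Note (inferred from the entry name): the kernel below is the luma-plane
	// bicubic resampler for a yuv420p source and a yuv444p destination. The
	// structure matches the surrounding entries: bounds check against
	// param_8/param_9, map the output pixel to source coordinates via the ratios
	// param_11/param_8 and param_12/param_9, gather a 4x4 texel window, apply the
	// cubic weights, scale by 255.0 (0f437F0000), and store a single byte.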
.visible .entry Subsample_Bicubic_yuv420p_yuv444p(
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_yuv444p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB184_2;
	bra.uni 	$L__BB184_1;
$L__BB184_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_yuv444p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv420p_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB184_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_yuv444p_uv
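	// Note (inferred): the chroma kernel below samples two separate textures
	// (param_1 and param_2), filters each with the same 4x4 cubic window, and
	// writes the two filtered 8-bit results to the separate planes reached
	// through param_5 and param_6.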
.visible .entry Subsample_Bicubic_yuv420p_yuv444p_uv(
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB185_2;
	bra.uni 	$L__BB185_1;
$L__BB185_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv420p_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.wide.s32 	%rd39, %r2, %r5;
	cvt.s64.s32 	%rd40, %r1;
	add.s64 	%rd41, %rd39, %rd40;
	add.s64 	%rd42, %rd2, %rd41;
	st.global.u8 	[%rd42], %rs1;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f190;
	add.s64 	%rd43, %rd1, %rd41;
	st.global.u8 	[%rd43], %rs2;
$L__BB185_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_yuv444p
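	// Note (inferred): luma-plane bicubic resampler for an nv12 source feeding a
	// yuv444p destination; same filter structure as the entries above, with the
	// 8-bit result scaled by 255.0 before the store.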
.visible .entry Subsample_Bicubic_nv12_yuv444p(
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_0,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_1,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_2,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_3,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_4,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_5,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_6,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_param_7,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_param_8,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_param_9,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_param_10,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_param_11,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_param_12,
	.param .f32 Subsample_Bicubic_nv12_yuv444p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB186_2;
	bra.uni 	$L__BB186_1;
$L__BB186_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_yuv444p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB186_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_yuv444p_uv
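	// Note (inferred): the chroma kernel below fetches a single interleaved UV
	// texture (param_1) and uses both the .x and .y components of each fetch
	// (%r17/%r18, ...), filtering U and V side by side and storing the two 8-bit
	// results into the separate destination planes (param_5 and param_6).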
.visible .entry Subsample_Bicubic_nv12_yuv444p_uv(
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_0,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_1,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_2,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_3,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_4,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_5,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_6,
	.param .u64 Subsample_Bicubic_nv12_yuv444p_uv_param_7,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_uv_param_8,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_uv_param_9,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_uv_param_10,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_uv_param_11,
	.param .u32 Subsample_Bicubic_nv12_yuv444p_uv_param_12,
	.param .f32 Subsample_Bicubic_nv12_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB187_2;
	bra.uni 	$L__BB187_1;
$L__BB187_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_yuv444p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_nv12_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_nv12_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f437F0000;
	mul.f32 	%f158, %f156, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs1;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs2;
$L__BB187_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p_yuv444p
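	// Note (inferred): same-format pair; the kernel below bicubically scales the
	// 8-bit luma plane of a yuv444p source into a yuv444p destination.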
.visible .entry Subsample_Bicubic_yuv444p_yuv444p(
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_yuv444p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB188_2;
	bra.uni 	$L__BB188_1;
$L__BB188_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_yuv444p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB188_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p_yuv444p_uv
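	// Note (inferred): chroma counterpart of the entry above; it reads the planar
	// U and V textures (param_1, param_2) and writes the filtered 8-bit results
	// to the two chroma planes of the yuv444p destination (param_5, param_6).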
.visible .entry Subsample_Bicubic_yuv444p_yuv444p_uv(
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB189_2;
	bra.uni 	$L__BB189_1;
$L__BB189_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv444p_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.wide.s32 	%rd39, %r2, %r5;
	cvt.s64.s32 	%rd40, %r1;
	add.s64 	%rd41, %rd39, %rd40;
	add.s64 	%rd42, %rd2, %rd41;
	st.global.u8 	[%rd42], %rs1;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f190;
	add.s64 	%rd43, %rd1, %rd41;
	st.global.u8 	[%rd43], %rs2;
$L__BB189_2:
	ret;

}
	// .globl	Subsample_Bicubic_p010le_yuv444p
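	// Judging from its name and body, the kernel below bicubic-resamples one 16-bit plane
	// of a p010le source into an 8-bit yuv444p plane. Each thread derives (x, y) from
	// %ctaid/%ntid/%tid, exits early unless x < param_8 and y < param_9, and maps to a
	// source coordinate via the ratios param_11/param_8 and param_12/param_9. param_13
	// selects the cubic coefficient A: when it equals 999999.0 (0f497423F0) A is 0.0,
	// otherwise A = -param_13. The per-axis weights appear to follow the standard
	// cubic-convolution (Keys) form,
	//   w(t) = (A+2)|t|^3 - (A+3)|t|^2 + 1     for |t| < 1
	//   w(t) = A|t|^3 - 5A|t|^2 + 8A|t| - 4A   for 1 <= |t| < 2,
	// with the fourth weight taken as 1 minus the other three. Sixteen tex.2d taps are
	// blended, scaled by 65535.0 (0f477FFF00), truncated to u16, and the high byte is
	// stored to the destination plane at param_4 with pitch param_10.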
.visible .entry Subsample_Bicubic_p010le_yuv444p(
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_0,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_1,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_2,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_3,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_4,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_5,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_6,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_param_7,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_param_8,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_param_9,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_param_10,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_param_11,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_param_12,
	.param .f32 Subsample_Bicubic_p010le_yuv444p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB190_2;
	bra.uni 	$L__BB190_1;
$L__BB190_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_yuv444p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB190_2:
	ret;

}
	// .globl	Subsample_Bicubic_p010le_yuv444p_uv
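	// Judging from its name and body, this is the chroma companion of the kernel above:
	// one interleaved 16-bit UV texture (param_1) is sampled 16 times per thread, the .x
	// and .y components are weighted with the same bicubic coefficients, scaled by
	// 65535.0, and the high byte of each result is written to a separate planar
	// destination (param_5 and param_6), i.e. the interleaved chroma is split into
	// two planes.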
.visible .entry Subsample_Bicubic_p010le_yuv444p_uv(
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_0,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_1,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_2,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_3,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_4,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_5,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_6,
	.param .u64 Subsample_Bicubic_p010le_yuv444p_uv_param_7,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_uv_param_8,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_uv_param_9,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_uv_param_10,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_uv_param_11,
	.param .u32 Subsample_Bicubic_p010le_yuv444p_uv_param_12,
	.param .f32 Subsample_Bicubic_p010le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB191_2;
	bra.uni 	$L__BB191_1;
$L__BB191_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_yuv444p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_p010le_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_p010le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB191_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_yuv444p
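	// Apart from parameter names and basic-block labels, the body below appears identical
	// to Subsample_Bicubic_p010le_yuv444p above: a 16-bit plane is bicubic-resampled,
	// scaled by 65535.0, and the high byte of each result is stored to an 8-bit
	// yuv444p plane.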
.visible .entry Subsample_Bicubic_p016le_yuv444p(
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_0,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_1,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_2,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_3,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_4,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_5,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_6,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_param_7,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_param_8,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_param_9,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_param_10,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_param_11,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_param_12,
	.param .f32 Subsample_Bicubic_p016le_yuv444p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB192_2;
	bra.uni 	$L__BB192_1;
$L__BB192_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_yuv444p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB192_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_yuv444p_uv
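	// Chroma companion of the kernel above; apart from parameter names and labels it
	// appears to match Subsample_Bicubic_p010le_yuv444p_uv: interleaved 16-bit UV is
	// sampled from param_1 and split into two 8-bit planar outputs at param_5 and param_6.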
.visible .entry Subsample_Bicubic_p016le_yuv444p_uv(
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_0,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_1,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_2,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_3,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_4,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_5,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_6,
	.param .u64 Subsample_Bicubic_p016le_yuv444p_uv_param_7,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_uv_param_8,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_uv_param_9,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_uv_param_10,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_uv_param_11,
	.param .u32 Subsample_Bicubic_p016le_yuv444p_uv_param_12,
	.param .f32 Subsample_Bicubic_p016le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB193_2;
	bra.uni 	$L__BB193_1;
$L__BB193_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_yuv444p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_p016le_yuv444p_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_p016le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB193_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_yuv444p
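	// Judging from its name, the kernel below reads one plane of planar 16-bit
	// yuv444p16le through the texture at param_0 and writes the bicubic-filtered,
	// high-byte result to an 8-bit yuv444p plane at param_4; the arithmetic matches
	// the 16-bit kernels above.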
.visible .entry Subsample_Bicubic_yuv444p16le_yuv444p(
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_yuv444p_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB194_2;
	bra.uni 	$L__BB194_1;
$L__BB194_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_yuv444p_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_yuv444p_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_yuv444p_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p16le_yuv444p_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB194_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_yuv444p_uv
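	// Chroma kernel for the planar 16-bit case: unlike the interleaved-UV variants above,
	// U and V come from two separate textures (param_1 and param_2), each gathered with
	// its own 16 tex.2d taps; each result is scaled by 65535.0, truncated to u16, and its
	// high byte is stored to the planes at param_5 and param_6 respectively.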
.visible .entry Subsample_Bicubic_yuv444p16le_yuv444p_uv(
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB195_2;
	bra.uni 	$L__BB195_1;
$L__BB195_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv444p16le_yuv444p_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f153;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd39, %r2, %r5;
	cvt.s64.s32 	%rd40, %r1;
	add.s64 	%rd41, %rd39, %rd40;
	add.s64 	%rd42, %rd2, %rd41;
	st.global.u8 	[%rd42], %rs2;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f190;
	shr.u16 	%rs4, %rs3, 8;
	add.s64 	%rd43, %rd1, %rd41;
	st.global.u8 	[%rd43], %rs4;
$L__BB195_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_p010le
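	// Judging from its name, the kernel below goes the other direction: an 8-bit yuv420p
	// plane (texture at param_0) is bicubic-resampled toward a 16-bit p010le destination
	// at param_4. The prologue (thread indexing, bounds check against param_8/param_9,
	// scale factors from param_11/param_12, cubic weights from param_13) follows the same
	// pattern as the kernels above.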
.visible .entry Subsample_Bicubic_yuv420p_p010le(
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_p010le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB196_2;
	bra.uni 	$L__BB196_1;
$L__BB196_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_p010le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv420p_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB196_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_p010le_uv
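	// Note: chroma counterpart of the kernel above. It samples two separate planar
	// chroma textures (params 1 and 2, presumably the U and V planes) with the same
	// bicubic weights, widens each result the same way (x255, x257, & 0xFFC0), and
	// writes the pair with a single v2.u16 store into p010le's interleaved UV plane
	// (the pitch is shifted right by 2 because each output pixel is one u16 pair).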
.visible .entry Subsample_Bicubic_yuv420p_p010le_uv(
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_p010le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_p010le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_p010le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB197_2;
	bra.uni 	$L__BB197_1;
$L__BB197_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_p010le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_p010le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv420p_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv420p_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs4, %f190;
	mul.lo.s16 	%rs5, %rs4, 257;
	and.b16  	%rs6, %rs5, -64;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs3, %rs6};
$L__BB197_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_p010le
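	// Note: luma path for an nv12 -> p010le conversion. nv12 and yuv420p share the
	// same 8-bit Y plane layout, so this body is structurally identical to
	// Subsample_Bicubic_yuv420p_p010le above; only the entry name differs.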
.visible .entry Subsample_Bicubic_nv12_p010le(
	.param .u64 Subsample_Bicubic_nv12_p010le_param_0,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_1,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_2,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_3,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_4,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_5,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_6,
	.param .u64 Subsample_Bicubic_nv12_p010le_param_7,
	.param .u32 Subsample_Bicubic_nv12_p010le_param_8,
	.param .u32 Subsample_Bicubic_nv12_p010le_param_9,
	.param .u32 Subsample_Bicubic_nv12_p010le_param_10,
	.param .u32 Subsample_Bicubic_nv12_p010le_param_11,
	.param .u32 Subsample_Bicubic_nv12_p010le_param_12,
	.param .f32 Subsample_Bicubic_nv12_p010le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB198_2;
	bra.uni 	$L__BB198_1;
$L__BB198_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_p010le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB198_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_p010le_uv
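	// Note: chroma path for nv12 input. nv12 stores U and V interleaved, so this
	// variant reads a single texture (param_1) and uses two components of each fetch
	// (e.g. %r17/%r18) instead of sampling two separate planes; both channels share
	// the bicubic weights and are packed into one v2.u16 store, as in the other
	// *_p010le_uv kernels.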
.visible .entry Subsample_Bicubic_nv12_p010le_uv(
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_0,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_1,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_2,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_3,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_4,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_5,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_6,
	.param .u64 Subsample_Bicubic_nv12_p010le_uv_param_7,
	.param .u32 Subsample_Bicubic_nv12_p010le_uv_param_8,
	.param .u32 Subsample_Bicubic_nv12_p010le_uv_param_9,
	.param .u32 Subsample_Bicubic_nv12_p010le_uv_param_10,
	.param .u32 Subsample_Bicubic_nv12_p010le_uv_param_11,
	.param .u32 Subsample_Bicubic_nv12_p010le_uv_param_12,
	.param .f32 Subsample_Bicubic_nv12_p010le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB199_2;
	bra.uni 	$L__BB199_1;
$L__BB199_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_p010le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f437F0000;
	mul.f32 	%f158, %f156, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	mul.lo.s16 	%rs3, %rs1, 257;
	and.b16  	%rs4, %rs3, -64;
	mul.lo.s16 	%rs5, %rs2, 257;
	and.b16  	%rs6, %rs5, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs4, %rs6};
$L__BB199_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p_p010le
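	// Note: luma path for yuv444p -> p010le; the body matches the other 8-bit luma
	// kernels above, since the 4:4:4 vs 4:2:0 difference only affects the chroma
	// planes, whose sizes and pitches arrive through the scalar parameters.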
.visible .entry Subsample_Bicubic_yuv444p_p010le(
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_p010le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB200_2;
	bra.uni 	$L__BB200_1;
$L__BB200_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_p010le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB200_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p_p010le_uv
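	// Note: chroma path for yuv444p input, which is planar like yuv420p, so this
	// kernel again samples two separate chroma textures and emits a packed v2.u16
	// pair; any subsampling difference is presumably absorbed by the dimensions
	// passed in as parameters rather than by the kernel body itself.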
.visible .entry Subsample_Bicubic_yuv444p_p010le_uv(
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_p010le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_p010le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_p010le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB201_2;
	bra.uni 	$L__BB201_1;
$L__BB201_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_p010le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_p010le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv444p_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs4, %f190;
	mul.lo.s16 	%rs5, %rs4, 257;
	and.b16  	%rs6, %rs5, -64;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs3, %rs6};
$L__BB201_2:
	ret;

}
	// .globl	Subsample_Bicubic_p010le_p010le
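	// Note: 16-bit to 16-bit luma path. The interpolation is the same, but the result
	// is scaled by 65535.0 (0f477FFF00) and stored directly as a u16, with no 257 /
	// 0xFFC0 widening step, since source and destination both carry 16-bit samples.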
.visible .entry Subsample_Bicubic_p010le_p010le(
	.param .u64 Subsample_Bicubic_p010le_p010le_param_0,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_1,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_2,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_3,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_4,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_5,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_6,
	.param .u64 Subsample_Bicubic_p010le_p010le_param_7,
	.param .u32 Subsample_Bicubic_p010le_p010le_param_8,
	.param .u32 Subsample_Bicubic_p010le_p010le_param_9,
	.param .u32 Subsample_Bicubic_p010le_p010le_param_10,
	.param .u32 Subsample_Bicubic_p010le_p010le_param_11,
	.param .u32 Subsample_Bicubic_p010le_p010le_param_12,
	.param .f32 Subsample_Bicubic_p010le_p010le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB202_2;
	bra.uni 	$L__BB202_1;
$L__BB202_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_p010le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB202_2:
	ret;

}
	// .globl	Subsample_Bicubic_p010le_p010le_uv
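	// Note: interleaved-chroma counterpart of the kernel above; like the nv12 chroma
	// kernel it reads a single texture (param_1) and, by analogy with the other *_uv
	// entries, should write both channels with one vector store after scaling the
	// interpolated values to the 16-bit range.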
.visible .entry Subsample_Bicubic_p010le_p010le_uv(
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_0,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_1,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_2,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_3,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_4,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_5,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_6,
	.param .u64 Subsample_Bicubic_p010le_p010le_uv_param_7,
	.param .u32 Subsample_Bicubic_p010le_p010le_uv_param_8,
	.param .u32 Subsample_Bicubic_p010le_p010le_uv_param_9,
	.param .u32 Subsample_Bicubic_p010le_p010le_uv_param_10,
	.param .u32 Subsample_Bicubic_p010le_p010le_uv_param_11,
	.param .u32 Subsample_Bicubic_p010le_p010le_uv_param_12,
	.param .f32 Subsample_Bicubic_p010le_p010le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;
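	// Bicubic 4x4 chroma resampler (p010le -> p010le, interleaved UV).
	// Maps the output pixel (x,y) to a source coordinate
	//   sx = (x + 0.5) * src_w/dst_w - 0.5,  sy = (y + 0.5) * src_h/dst_h - 0.5,
	// splits it into an integer base (floor) and a fractional offset t, and
	// derives the four tap weights per axis from t and the coefficient
	//   b = (param_13 == 999999.0) ? 0.0 : -param_13:
	//   w(-1) = b*((t+1)^3 - 5*(t+1)^2 + 8*(t+1) - 4)
	//   w( 0) = (b+2)*t^3 - (b+3)*t^2 + 1
	//   w(+1) = (b+2)*(1-t)^3 - (b+3)*(1-t)^2 + 1
	//   w(+2) = 1 - w(-1) - w(0) - w(+1)
	// A 4x4 texel neighbourhood is fetched from the UV texture (param_1), whose
	// two components carry U and V. Both channels are blended horizontally and
	// vertically, scaled by 65535.0, truncated, and stored as an interleaved
	// {U,V} pair (st.global.v2.u16) at pair index y*(pitch/4) + x.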

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB203_2;
	bra.uni 	$L__BB203_1;
$L__BB203_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_p010le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs1, %rs2};
$L__BB203_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_p010le
.visible .entry Subsample_Bicubic_p016le_p010le(
	.param .u64 Subsample_Bicubic_p016le_p010le_param_0,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_1,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_2,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_3,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_4,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_5,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_6,
	.param .u64 Subsample_Bicubic_p016le_p010le_param_7,
	.param .u32 Subsample_Bicubic_p016le_p010le_param_8,
	.param .u32 Subsample_Bicubic_p016le_p010le_param_9,
	.param .u32 Subsample_Bicubic_p016le_p010le_param_10,
	.param .u32 Subsample_Bicubic_p016le_p010le_param_11,
	.param .u32 Subsample_Bicubic_p016le_p010le_param_12,
	.param .f32 Subsample_Bicubic_p016le_p010le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;
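	// Bicubic 4x4 luma resampler (p016le -> p010le). Same coordinate mapping
	// and tap weights as above; a single channel is fetched from the source
	// texture (param_0), blended, scaled by 65535.0 and truncated, then the
	// low 6 bits are masked off (and.b16 with 0xFFC0) to match the p010
	// layout (10 significant bits in the MSBs) before the u16 store.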

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB204_2;
	bra.uni 	$L__BB204_1;
$L__BB204_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_p010le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB204_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_p010le_uv
.visible .entry Subsample_Bicubic_p016le_p010le_uv(
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_0,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_1,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_2,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_3,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_4,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_5,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_6,
	.param .u64 Subsample_Bicubic_p016le_p010le_uv_param_7,
	.param .u32 Subsample_Bicubic_p016le_p010le_uv_param_8,
	.param .u32 Subsample_Bicubic_p016le_p010le_uv_param_9,
	.param .u32 Subsample_Bicubic_p016le_p010le_uv_param_10,
	.param .u32 Subsample_Bicubic_p016le_p010le_uv_param_11,
	.param .u32 Subsample_Bicubic_p016le_p010le_uv_param_12,
	.param .f32 Subsample_Bicubic_p016le_p010le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;
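	// Bicubic 4x4 chroma resampler (p016le -> p010le, interleaved UV): as the
	// UV kernel above, but both channels are additionally masked with 0xFFC0
	// after the 65535.0 scale so the stored {U,V} pair uses the p010 layout.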

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB205_2;
	bra.uni 	$L__BB205_1;
$L__BB205_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_p010le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_p010le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	and.b16  	%rs3, %rs1, -64;
	and.b16  	%rs4, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs3, %rs4};
$L__BB205_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_p010le
.visible .entry Subsample_Bicubic_yuv444p16le_p010le(
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_p010le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;
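	// Bicubic 4x4 luma resampler (yuv444p16le -> p010le): single channel from
	// the source texture (param_0), scaled by 65535.0, truncated, masked with
	// 0xFFC0 for the p010 layout, and stored as one u16 per output pixel.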

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB206_2;
	bra.uni 	$L__BB206_1;
$L__BB206_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_p010le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_p010le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_p010le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p16le_p010le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB206_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_p010le_uv
.visible .entry Subsample_Bicubic_yuv444p16le_p010le_uv(
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_p010le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_p010le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_p010le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;
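	// Bicubic 4x4 chroma resampler (yuv444p16le -> p010le, interleaved UV).
	// U and V come from two separate planar source textures (param_1 and
	// param_2), so the 4x4 fetch/blend sequence runs twice with the same
	// weights; each result is scaled by 65535.0, truncated and masked with
	// 0xFFC0, then the pair is stored with st.global.v2.u16.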

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB207_2;
	bra.uni 	$L__BB207_1;
$L__BB207_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_p010le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f153;
	and.b16  	%rs2, %rs1, -64;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f190;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs2, %rs4};
$L__BB207_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_p016le
.visible .entry Subsample_Bicubic_yuv420p_p016le(
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_p016le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;
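	// Bicubic 4x4 luma resampler (yuv420p -> p016le): single 8-bit channel
	// from the source texture (param_0), blended, scaled by 255.0 and
	// truncated, then multiplied by 257 to expand the 8-bit value to the full
	// 16-bit range before the u16 store.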

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB208_2;
	bra.uni 	$L__BB208_1;
$L__BB208_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_p016le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv420p_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB208_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_p016le_uv
.visible .entry Subsample_Bicubic_yuv420p_p016le_uv(
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_p016le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_p016le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_p016le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;
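	// Bicubic 4x4 chroma resampler (yuv420p -> p016le, interleaved UV): U and
	// V are fetched from two planar 8-bit source textures (param_1 and
	// param_2), blended with the same weights, scaled by 255.0, truncated,
	// expanded by *257 to 16 bits, and stored as an interleaved {U,V} pair.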

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB209_2;
	bra.uni 	$L__BB209_1;
$L__BB209_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_p016le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_p016le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv420p_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv420p_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.lo.s16 	%rs2, %rs1, 257;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f190;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs2, %rs4};
$L__BB209_2:
	ret;

}
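//
// The entry below, Subsample_Bicubic_nv12_p016le, appears to be the
// luma-plane bicubic rescale for an nv12 -> p016le conversion (inferred from
// the kernel name and the visible constants). Source coordinates use the
// centre-aligned mapping (x + 0.5) * (src/dst) - 0.5, and the four tap
// weights follow the standard bicubic form with A = -param_13 (forced to 0
// when param_13 equals the sentinel 0f497423F0 = 999999.0):
//   w(1+t) = ((A*(1+t) - 5A)*(1+t) + 8A)*(1+t) - 4A
//   w(t)   = ((A+2)*t - (A+3))*t*t + 1
// The 16 gathered texels are blended, scaled by 255.0 (0f437F0000), truncated
// to u16 and multiplied by 257 (255 * 257 = 65535) to widen the 8-bit sample
// to the 16-bit output range before the st.global.u16 store.
//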
	// .globl	Subsample_Bicubic_nv12_p016le
.visible .entry Subsample_Bicubic_nv12_p016le(
	.param .u64 Subsample_Bicubic_nv12_p016le_param_0,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_1,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_2,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_3,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_4,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_5,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_6,
	.param .u64 Subsample_Bicubic_nv12_p016le_param_7,
	.param .u32 Subsample_Bicubic_nv12_p016le_param_8,
	.param .u32 Subsample_Bicubic_nv12_p016le_param_9,
	.param .u32 Subsample_Bicubic_nv12_p016le_param_10,
	.param .u32 Subsample_Bicubic_nv12_p016le_param_11,
	.param .u32 Subsample_Bicubic_nv12_p016le_param_12,
	.param .f32 Subsample_Bicubic_nv12_p016le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB210_2;
	bra.uni 	$L__BB210_1;
$L__BB210_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_p016le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB210_2:
	ret;

}
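//
// Subsample_Bicubic_nv12_p016le_uv below appears to be the interleaved
// chroma (UV) variant of the preceding kernel: the same bicubic weights are
// applied to the first two texture components, and both results are scaled
// by 255.0, widened with the *257 multiply and written as a v2.u16 pair.
//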
	// .globl	Subsample_Bicubic_nv12_p016le_uv
.visible .entry Subsample_Bicubic_nv12_p016le_uv(
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_0,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_1,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_2,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_3,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_4,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_5,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_6,
	.param .u64 Subsample_Bicubic_nv12_p016le_uv_param_7,
	.param .u32 Subsample_Bicubic_nv12_p016le_uv_param_8,
	.param .u32 Subsample_Bicubic_nv12_p016le_uv_param_9,
	.param .u32 Subsample_Bicubic_nv12_p016le_uv_param_10,
	.param .u32 Subsample_Bicubic_nv12_p016le_uv_param_11,
	.param .u32 Subsample_Bicubic_nv12_p016le_uv_param_12,
	.param .f32 Subsample_Bicubic_nv12_p016le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB211_2;
	bra.uni 	$L__BB211_1;
$L__BB211_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_p016le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f437F0000;
	mul.f32 	%f158, %f156, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	mul.lo.s16 	%rs3, %rs1, 257;
	mul.lo.s16 	%rs4, %rs2, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs3, %rs4};
$L__BB211_2:
	ret;

}
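//
// Subsample_Bicubic_yuv444p_p016le below appears to apply the same bicubic
// weighting to a single 8-bit plane of a yuv444p source, again widening the
// blended result to 16 bits with the *257 multiply before the u16 store.
//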
	// .globl	Subsample_Bicubic_yuv444p_p016le
.visible .entry Subsample_Bicubic_yuv444p_p016le(
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_p016le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB212_2;
	bra.uni 	$L__BB212_1;
$L__BB212_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_p016le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB212_2:
	ret;

}
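//
// Subsample_Bicubic_yuv444p_p016le_uv below appears to handle the planar
// chroma case: it samples two separate textures (param_1 and param_2, i.e.
// the U and V planes), applies the same bicubic weights to each, and stores
// the interleaved result as a v2.u16 pair.
//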
	// .globl	Subsample_Bicubic_yuv444p_p016le_uv
.visible .entry Subsample_Bicubic_yuv444p_p016le_uv(
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_p016le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_p016le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_p016le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB213_2;
	bra.uni 	$L__BB213_1;
$L__BB213_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_p016le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_p016le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv444p_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.lo.s16 	%rs2, %rs1, 257;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f190;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs2, %rs4};
$L__BB213_2:
	ret;

}
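//
// Subsample_Bicubic_p010le_p016le below appears to be the 10-bit luma path:
// the blended value is scaled by 65535.0 (0f477FFF00), truncated to u16 and
// OR-ed with itself shifted right by 10, which looks like the usual trick
// for extending a 10-bit sample held in the high bits to the full 16-bit
// range.
//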
	// .globl	Subsample_Bicubic_p010le_p016le
.visible .entry Subsample_Bicubic_p010le_p016le(
	.param .u64 Subsample_Bicubic_p010le_p016le_param_0,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_1,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_2,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_3,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_4,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_5,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_6,
	.param .u64 Subsample_Bicubic_p010le_p016le_param_7,
	.param .u32 Subsample_Bicubic_p010le_p016le_param_8,
	.param .u32 Subsample_Bicubic_p010le_p016le_param_9,
	.param .u32 Subsample_Bicubic_p010le_p016le_param_10,
	.param .u32 Subsample_Bicubic_p010le_p016le_param_11,
	.param .u32 Subsample_Bicubic_p010le_p016le_param_12,
	.param .f32 Subsample_Bicubic_p010le_p016le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB214_2;
	bra.uni 	$L__BB214_1;
$L__BB214_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_p016le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB214_2:
	ret;

}
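//
// Subsample_Bicubic_p010le_p016le_uv below appears to be the interleaved
// chroma (UV) variant of the preceding 10-bit kernel, applying the same
// shift-and-OR widening to both components before the v2.u16 store.
//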
	// .globl	Subsample_Bicubic_p010le_p016le_uv
.visible .entry Subsample_Bicubic_p010le_p016le_uv(
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_0,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_1,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_2,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_3,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_4,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_5,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_6,
	.param .u64 Subsample_Bicubic_p010le_p016le_uv_param_7,
	.param .u32 Subsample_Bicubic_p010le_p016le_uv_param_8,
	.param .u32 Subsample_Bicubic_p010le_p016le_uv_param_9,
	.param .u32 Subsample_Bicubic_p010le_p016le_uv_param_10,
	.param .u32 Subsample_Bicubic_p010le_p016le_uv_param_11,
	.param .u32 Subsample_Bicubic_p010le_p016le_uv_param_12,
	.param .f32 Subsample_Bicubic_p010le_p016le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB215_2;
	bra.uni 	$L__BB215_1;
$L__BB215_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_p016le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs4, %rs6};
$L__BB215_2:
	ret;

}
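//
// Subsample_Bicubic_p016le_p016le below appears to be the 16-bit luma path:
// the blended value is scaled by 65535.0 and stored directly as u16, with no
// further bit widening.
//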
	// .globl	Subsample_Bicubic_p016le_p016le
.visible .entry Subsample_Bicubic_p016le_p016le(
	.param .u64 Subsample_Bicubic_p016le_p016le_param_0,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_1,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_2,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_3,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_4,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_5,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_6,
	.param .u64 Subsample_Bicubic_p016le_p016le_param_7,
	.param .u32 Subsample_Bicubic_p016le_p016le_param_8,
	.param .u32 Subsample_Bicubic_p016le_p016le_param_9,
	.param .u32 Subsample_Bicubic_p016le_p016le_param_10,
	.param .u32 Subsample_Bicubic_p016le_p016le_param_11,
	.param .u32 Subsample_Bicubic_p016le_p016le_param_12,
	.param .f32 Subsample_Bicubic_p016le_p016le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB216_2;
	bra.uni 	$L__BB216_1;
$L__BB216_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_p016le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
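	// Vertical blend of the four row sums, then scale by 65535.0 (0f477FFF00),
	// truncate to u16 and store into the destination plane; param_10 looks like
	// a byte pitch, so >>1 gives the row stride in 16-bit elements.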
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB216_2:
	ret;

}
	// .globl	Subsample_Bicubic_p016le_p016le_uv
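	// Interleaved-chroma variant: the same bicubic weights are applied to both
	// channels of each texel from the chroma texture (param_1), read as the .x
	// and .y components of the tex.2d result, and the filtered pair is stored
	// with st.global.v2.u16; the pitch is >>2 because each destination element
	// is a 4-byte pair of u16 values.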
.visible .entry Subsample_Bicubic_p016le_p016le_uv(
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_0,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_1,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_2,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_3,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_4,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_5,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_6,
	.param .u64 Subsample_Bicubic_p016le_p016le_uv_param_7,
	.param .u32 Subsample_Bicubic_p016le_p016le_uv_param_8,
	.param .u32 Subsample_Bicubic_p016le_p016le_uv_param_9,
	.param .u32 Subsample_Bicubic_p016le_p016le_uv_param_10,
	.param .u32 Subsample_Bicubic_p016le_p016le_uv_param_11,
	.param .u32 Subsample_Bicubic_p016le_p016le_uv_param_12,
	.param .f32 Subsample_Bicubic_p016le_p016le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB217_2;
	bra.uni 	$L__BB217_1;
$L__BB217_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_p016le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_p016le_uv_param_1];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs1, %rs2};
$L__BB217_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_p016le
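	// yuv444p16le -> p016le luma: same body as the p016le -> p016le luma kernel
	// above; only the texture and destination plane bound by the host differ.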
.visible .entry Subsample_Bicubic_yuv444p16le_p016le(
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_p016le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB218_2;
	bra.uni 	$L__BB218_1;
$L__BB218_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_p016le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_p016le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_p016le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p16le_p016le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB218_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_p016le_uv
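	// Planar-to-interleaved chroma: the two channels are read from two separate
	// textures (param_1 and param_2), filtered with the same weights, and the
	// two u16 results are written as an interleaved pair into the destination
	// chroma plane (param_5).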
.visible .entry Subsample_Bicubic_yuv444p16le_p016le_uv(
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_p016le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_p016le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_p016le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB219_2;
	bra.uni 	$L__BB219_1;
$L__BB219_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_2];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_p016le_uv_param_5];
	cvta.to.global.u64 	%rd1, %rd4;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f153;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs2, %f190;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs1, %rs2};
$L__BB219_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_yuv444p16le
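	// 8-bit source, 16-bit planar destination: the filtered value is scaled by
	// 255.0 (0f437F0000) and the truncated 8-bit result is multiplied by 257
	// (x*257 == x<<8 | x) to expand it to the full 16-bit range before the store.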
.visible .entry Subsample_Bicubic_yuv420p_yuv444p16le(
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB220_2;
	bra.uni 	$L__BB220_1;
$L__BB220_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_yuv444p16le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv420p_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv420p_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB220_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv420p_yuv444p16le_uv
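	// Chroma variant of the above: the two planar 8-bit chroma textures
	// (param_1, param_2) are filtered independently and written to two planar
	// 16-bit destinations (param_5, param_6), each with the same 255 * 257
	// expansion.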
.visible .entry Subsample_Bicubic_yuv420p_yuv444p16le_uv(
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<48>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB221_2;
	bra.uni 	$L__BB221_1;
$L__BB221_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv420p_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd39, %r2;
	cvt.s64.s32 	%rd40, %r5;
	shr.u64 	%rd41, %rd40, 1;
	mul.lo.s64 	%rd42, %rd41, %rd39;
	cvt.s64.s32 	%rd43, %r1;
	add.s64 	%rd44, %rd42, %rd43;
	shl.b64 	%rd45, %rd44, 1;
	add.s64 	%rd46, %rd2, %rd45;
	st.global.u16 	[%rd46], %rs2;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f190;
	mul.lo.s16 	%rs4, %rs3, 257;
	add.s64 	%rd47, %rd1, %rd45;
	st.global.u16 	[%rd47], %rs4;
$L__BB221_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_yuv444p16le
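	// nv12 -> yuv444p16le luma: same body as the yuv420p -> yuv444p16le luma
	// kernel (both formats use an identical full-resolution 8-bit luma plane).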
.visible .entry Subsample_Bicubic_nv12_yuv444p16le(
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_0,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_1,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_2,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_3,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_4,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_5,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_6,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_param_7,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_param_8,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_param_9,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_param_10,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_param_11,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_param_12,
	.param .f32 Subsample_Bicubic_nv12_yuv444p16le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB222_2;
	bra.uni 	$L__BB222_1;
$L__BB222_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_yuv444p16le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_nv12_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB222_2:
	ret;

}
	// .globl	Subsample_Bicubic_nv12_yuv444p16le_uv
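	// nv12 -> yuv444p16le chroma: the source is a single interleaved UV texture
	// (param_1), so both channels of each texel are fetched and filtered;
	// param_5 and param_6 are converted to global addresses, presumably as the
	// two planar 16-bit chroma outputs.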
.visible .entry Subsample_Bicubic_nv12_yuv444p16le_uv(
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bicubic_nv12_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bicubic_nv12_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bicubic_nv12_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<31>;

	ld.param.u32 	%r4, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB223_2;
	bra.uni 	$L__BB223_1;
$L__BB223_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_nv12_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f437F0000;
	mul.f32 	%f158, %f156, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	mul.lo.s16 	%rs3, %rs1, 257;
	cvt.s64.s32 	%rd22, %r2;
	cvt.s64.s32 	%rd23, %r5;
	shr.u64 	%rd24, %rd23, 1;
	mul.lo.s64 	%rd25, %rd24, %rd22;
	cvt.s64.s32 	%rd26, %r1;
	add.s64 	%rd27, %rd25, %rd26;
	shl.b64 	%rd28, %rd27, 1;
	add.s64 	%rd29, %rd2, %rd28;
	st.global.u16 	[%rd29], %rs3;
	mul.lo.s16 	%rs4, %rs2, 257;
	add.s64 	%rd30, %rd1, %rd28;
	st.global.u16 	[%rd30], %rs4;
$L__BB223_2:
	ret;

}
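//
// Note: single-plane bicubic scaler. It reads one 8-bit plane (yuv444p) through
// a texture, evaluates the 4x4 bicubic filter, scales the result by 255.0 and
// widens it to 16 bits via the *257 trick before a single u16 store.
//
// For reference, the per-tap weights computed in these kernels match the
// standard Keys bicubic kernel; a minimal CUDA-style sketch of that weight
// function (the parameter name `A` is ours, not taken from this listing):
//
//   __device__ static inline float bicubic_w(float A, float x) {
//       // x is the absolute distance from the sample position, 0 <= x < 2
//       if (x < 1.0f)
//           return (A + 2.0f) * x * x * x - (A + 3.0f) * x * x + 1.0f;
//       return A * (x * x * x - 5.0f * x * x + 8.0f * x - 4.0f);
//   }
//
// The listing evaluates three of the four taps this way and derives the fourth
// as 1 minus the other three, so the weights always sum to 1.
//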
	// .globl	Subsample_Bicubic_yuv444p_yuv444p16le
.visible .entry Subsample_Bicubic_yuv444p_yuv444p16le(
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB224_2;
	bra.uni 	$L__BB224_1;
$L__BB224_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_yuv444p16le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f121;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB224_2:
	ret;

}
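//
// Note: chroma variant of the planar 8-bit path. The U and V source planes live
// in two separate textures (param_1 and param_2), so the 4x4 bicubic filter is
// evaluated twice, once per texture, and each result is widened to 16 bits
// (*257) and stored to its own destination plane.
//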
	// .globl	Subsample_Bicubic_yuv444p_yuv444p16le_uv
.visible .entry Subsample_Bicubic_yuv444p_yuv444p16le_uv(
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<48>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB225_2;
	bra.uni 	$L__BB225_1;
$L__BB225_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv444p_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f153;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd39, %r2;
	cvt.s64.s32 	%rd40, %r5;
	shr.u64 	%rd41, %rd40, 1;
	mul.lo.s64 	%rd42, %rd41, %rd39;
	cvt.s64.s32 	%rd43, %r1;
	add.s64 	%rd44, %rd42, %rd43;
	shl.b64 	%rd45, %rd44, 1;
	add.s64 	%rd46, %rd2, %rd45;
	st.global.u16 	[%rd46], %rs2;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f190;
	mul.lo.s16 	%rs4, %rs3, 257;
	add.s64 	%rd47, %rd1, %rd45;
	st.global.u16 	[%rd47], %rs4;
$L__BB225_2:
	ret;

}
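//
// Note: p010le luma path. Same 4x4 bicubic filter, but the normalized result is
// scaled by 65535.0 (0f477FFF00) instead of 255.0. Because p010le keeps its 10
// significant bits in the top of each 16-bit word, the converted value is then
// OR-ed with itself shifted right by 10, replicating the high bits into the
// low bits so the sample fills the full 16-bit range before the u16 store.
//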
	// .globl	Subsample_Bicubic_p010le_yuv444p16le
.visible .entry Subsample_Bicubic_p010le_yuv444p16le(
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_0,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_1,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_2,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_3,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_4,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_5,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_6,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_param_7,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_param_8,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_param_9,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_param_10,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_param_11,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_param_12,
	.param .f32 Subsample_Bicubic_p010le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB226_2;
	bra.uni 	$L__BB226_1;
$L__BB226_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_yuv444p16le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p010le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB226_2:
	ret;

}
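//
// Note: p010le chroma path. Interleaved UV source texture, two 16-bit
// destination planes; each channel gets the same 65535.0 scale and >>10 bit
// replication used by the luma kernel above.
//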
	// .globl	Subsample_Bicubic_p010le_yuv444p16le_uv
.visible .entry Subsample_Bicubic_p010le_yuv444p16le_uv(
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bicubic_p010le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bicubic_p010le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bicubic_p010le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<31>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB227_2;
	bra.uni 	$L__BB227_1;
$L__BB227_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_p010le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	cvt.s64.s32 	%rd22, %r2;
	cvt.s64.s32 	%rd23, %r5;
	shr.u64 	%rd24, %rd23, 1;
	mul.lo.s64 	%rd25, %rd24, %rd22;
	cvt.s64.s32 	%rd26, %r1;
	add.s64 	%rd27, %rd25, %rd26;
	shl.b64 	%rd28, %rd27, 1;
	add.s64 	%rd29, %rd2, %rd28;
	st.global.u16 	[%rd29], %rs4;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	add.s64 	%rd30, %rd1, %rd28;
	st.global.u16 	[%rd30], %rs6;
$L__BB227_2:
	ret;

}
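//
// Note: p016le luma path. The source already carries 16 significant bits, so
// the bicubic result is simply scaled by 65535.0 and stored as u16, with no
// bit replication or *257 widening.
//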
	// .globl	Subsample_Bicubic_p016le_yuv444p16le
.visible .entry Subsample_Bicubic_p016le_yuv444p16le(
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_0,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_1,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_2,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_3,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_4,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_5,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_6,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_param_7,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_param_8,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_param_9,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_param_10,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_param_11,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_param_12,
	.param .f32 Subsample_Bicubic_p016le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB228_2;
	bra.uni 	$L__BB228_1;
$L__BB228_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_yuv444p16le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_p016le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB228_2:
	ret;

}
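//
// Note: p016le chroma path. Same as the luma kernel above, but it fetches the
// interleaved UV texture and writes the two interpolated channels to the
// separate 16-bit U and V destination planes.
//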
	// .globl	Subsample_Bicubic_p016le_yuv444p16le_uv
.visible .entry Subsample_Bicubic_p016le_yuv444p16le_uv(
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bicubic_p016le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bicubic_p016le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bicubic_p016le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<159>;
	.reg .b64 	%rd<31>;

	ld.param.u32 	%r4, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB229_2;
	bra.uni 	$L__BB229_1;
$L__BB229_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd4, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd4;
	ld.param.u64 	%rd5, [Subsample_Bicubic_p016le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd5;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r18;
	mov.b32 	%f86, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f87, %r22;
	mov.b32 	%f88, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f89, %r26;
	mov.b32 	%f90, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f91, %r30;
	mov.b32 	%f92, %r29;
	mul.f32 	%f93, %f62, %f88;
	mul.f32 	%f94, %f62, %f87;
	fma.rn.f32 	%f95, %f56, %f86, %f93;
	fma.rn.f32 	%f96, %f56, %f85, %f94;
	fma.rn.f32 	%f97, %f67, %f90, %f95;
	fma.rn.f32 	%f98, %f67, %f89, %f96;
	fma.rn.f32 	%f99, %f70, %f92, %f97;
	fma.rn.f32 	%f100, %f70, %f91, %f98;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f101, %r34;
	mov.b32 	%f102, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f103, %r38;
	mov.b32 	%f104, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f105, %r42;
	mov.b32 	%f106, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f107, %r46;
	mov.b32 	%f108, %r45;
	mul.f32 	%f109, %f62, %f104;
	mul.f32 	%f110, %f62, %f103;
	fma.rn.f32 	%f111, %f56, %f102, %f109;
	fma.rn.f32 	%f112, %f56, %f101, %f110;
	fma.rn.f32 	%f113, %f67, %f106, %f111;
	fma.rn.f32 	%f114, %f67, %f105, %f112;
	fma.rn.f32 	%f115, %f70, %f108, %f113;
	fma.rn.f32 	%f116, %f70, %f107, %f114;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f117, %r50;
	mov.b32 	%f118, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f119, %r54;
	mov.b32 	%f120, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f121, %r58;
	mov.b32 	%f122, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f123, %r62;
	mov.b32 	%f124, %r61;
	mul.f32 	%f125, %f62, %f120;
	mul.f32 	%f126, %f62, %f119;
	fma.rn.f32 	%f127, %f56, %f118, %f125;
	fma.rn.f32 	%f128, %f56, %f117, %f126;
	fma.rn.f32 	%f129, %f67, %f122, %f127;
	fma.rn.f32 	%f130, %f67, %f121, %f128;
	fma.rn.f32 	%f131, %f70, %f124, %f129;
	fma.rn.f32 	%f132, %f70, %f123, %f130;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f133, %r66;
	mov.b32 	%f134, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f135, %r70;
	mov.b32 	%f136, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f137, %r74;
	mov.b32 	%f138, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f139, %r78;
	mov.b32 	%f140, %r77;
	mul.f32 	%f141, %f62, %f136;
	mul.f32 	%f142, %f62, %f135;
	fma.rn.f32 	%f143, %f56, %f134, %f141;
	fma.rn.f32 	%f144, %f56, %f133, %f142;
	fma.rn.f32 	%f145, %f67, %f138, %f143;
	fma.rn.f32 	%f146, %f67, %f137, %f144;
	fma.rn.f32 	%f147, %f70, %f140, %f145;
	fma.rn.f32 	%f148, %f70, %f139, %f146;
	mul.f32 	%f149, %f77, %f115;
	mul.f32 	%f150, %f77, %f116;
	fma.rn.f32 	%f151, %f74, %f99, %f149;
	fma.rn.f32 	%f152, %f74, %f100, %f150;
	fma.rn.f32 	%f153, %f81, %f131, %f151;
	fma.rn.f32 	%f154, %f81, %f132, %f152;
	fma.rn.f32 	%f155, %f84, %f147, %f153;
	fma.rn.f32 	%f156, %f84, %f148, %f154;
	mul.f32 	%f157, %f155, 0f477FFF00;
	mul.f32 	%f158, %f156, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f157;
	cvt.rzi.u16.f32 	%rs2, %f158;
	cvt.s64.s32 	%rd22, %r2;
	cvt.s64.s32 	%rd23, %r5;
	shr.u64 	%rd24, %rd23, 1;
	mul.lo.s64 	%rd25, %rd24, %rd22;
	cvt.s64.s32 	%rd26, %r1;
	add.s64 	%rd27, %rd25, %rd26;
	shl.b64 	%rd28, %rd27, 1;
	add.s64 	%rd29, %rd2, %rd28;
	st.global.u16 	[%rd29], %rs1;
	add.s64 	%rd30, %rd1, %rd28;
	st.global.u16 	[%rd30], %rs2;
$L__BB229_2:
	ret;

}
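//
// Note: 16-bit planar to 16-bit planar path. Same prologue and 4x4 bicubic
// weight computation as the kernels above; the listing is cut off partway
// through the texture fetches, so the final scale-and-store step is not
// visible here.
//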
	// .globl	Subsample_Bicubic_yuv444p16le_yuv444p16le
.visible .entry Subsample_Bicubic_yuv444p16le_yuv444p16le(
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<122>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB230_2;
	bra.uni 	$L__BB230_1;
$L__BB230_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_yuv444p16le_yuv444p16le_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f86, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f87, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f88, %r29;
	mul.f32 	%f89, %f62, %f86;
	fma.rn.f32 	%f90, %f56, %f85, %f89;
	fma.rn.f32 	%f91, %f67, %f87, %f90;
	fma.rn.f32 	%f92, %f70, %f88, %f91;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f93, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f94, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f95, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f96, %r45;
	mul.f32 	%f97, %f62, %f94;
	fma.rn.f32 	%f98, %f56, %f93, %f97;
	fma.rn.f32 	%f99, %f67, %f95, %f98;
	fma.rn.f32 	%f100, %f70, %f96, %f99;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f101, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f102, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f103, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f104, %r61;
	mul.f32 	%f105, %f62, %f102;
	fma.rn.f32 	%f106, %f56, %f101, %f105;
	fma.rn.f32 	%f107, %f67, %f103, %f106;
	fma.rn.f32 	%f108, %f70, %f104, %f107;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f109, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f110, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f111, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f112, %r77;
	mul.f32 	%f113, %f62, %f110;
	fma.rn.f32 	%f114, %f56, %f109, %f113;
	fma.rn.f32 	%f115, %f67, %f111, %f114;
	fma.rn.f32 	%f116, %f70, %f112, %f115;
	mul.f32 	%f117, %f77, %f100;
	fma.rn.f32 	%f118, %f74, %f92, %f117;
	fma.rn.f32 	%f119, %f81, %f108, %f118;
	fma.rn.f32 	%f120, %f84, %f116, %f119;
	mul.f32 	%f121, %f120, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f121;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB230_2:
	ret;

}
	// .globl	Subsample_Bicubic_yuv444p16le_yuv444p16le_uv
.visible .entry Subsample_Bicubic_yuv444p16le_yuv444p16le_uv(
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<191>;
	.reg .b64 	%rd<48>;

	ld.param.u32 	%r4, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB231_2;
	bra.uni 	$L__BB231_1;
$L__BB231_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd23, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_2];
	ld.param.u64 	%rd7, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_1];
	ld.param.u64 	%rd5, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_6];
	cvta.to.global.u64 	%rd1, %rd5;
	ld.param.u64 	%rd6, [Subsample_Bicubic_yuv444p16le_yuv444p16le_uv_param_5];
	cvta.to.global.u64 	%rd2, %rd6;
	cvt.rn.f32.s32 	%f66, %r6;
	cvt.rn.f32.s32 	%f67, %r3;
	div.rn.f32 	%f68, %f66, %f67;
	cvt.rn.f32.s32 	%f69, %r7;
	cvt.rn.f32.s32 	%f70, %r4;
	div.rn.f32 	%f71, %f69, %f70;
	cvt.rn.f32.s32 	%f72, %r1;
	add.f32 	%f73, %f72, 0f3F000000;
	fma.rn.f32 	%f74, %f68, %f73, 0fBF000000;
	cvt.rn.f32.s32 	%f75, %r2;
	add.f32 	%f76, %f75, 0f3F000000;
	fma.rn.f32 	%f77, %f71, %f76, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f74;
	cvt.rmi.f32.f32 	%f11, %f77;
	sub.f32 	%f78, %f74, %f4;
	sub.f32 	%f79, %f77, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f80, %f1;
	selp.f32 	%f81, 0f00000000, %f80, %p4;
	add.f32 	%f82, %f78, 0f3F800000;
	mul.f32 	%f83, %f81, 0fC0A00000;
	fma.rn.f32 	%f84, %f81, %f82, %f83;
	mul.f32 	%f85, %f81, 0f41000000;
	fma.rn.f32 	%f86, %f82, %f84, %f85;
	mul.f32 	%f87, %f81, 0fC0800000;
	fma.rn.f32 	%f88, %f82, %f86, %f87;
	add.f32 	%f89, %f81, 0f40000000;
	add.f32 	%f90, %f81, 0f40400000;
	neg.f32 	%f91, %f90;
	fma.rn.f32 	%f92, %f89, %f78, %f91;
	mul.f32 	%f93, %f78, %f92;
	fma.rn.f32 	%f94, %f78, %f93, 0f3F800000;
	mov.f32 	%f95, 0f3F800000;
	sub.f32 	%f96, %f95, %f78;
	fma.rn.f32 	%f97, %f89, %f96, %f91;
	mul.f32 	%f98, %f96, %f97;
	fma.rn.f32 	%f99, %f96, %f98, 0f3F800000;
	sub.f32 	%f100, %f95, %f88;
	sub.f32 	%f101, %f100, %f94;
	sub.f32 	%f102, %f101, %f99;
	add.f32 	%f103, %f79, 0f3F800000;
	fma.rn.f32 	%f104, %f81, %f103, %f83;
	fma.rn.f32 	%f105, %f103, %f104, %f85;
	fma.rn.f32 	%f106, %f103, %f105, %f87;
	fma.rn.f32 	%f107, %f89, %f79, %f91;
	mul.f32 	%f108, %f79, %f107;
	fma.rn.f32 	%f109, %f79, %f108, 0f3F800000;
	sub.f32 	%f110, %f95, %f79;
	fma.rn.f32 	%f111, %f89, %f110, %f91;
	mul.f32 	%f112, %f110, %f111;
	fma.rn.f32 	%f113, %f110, %f112, 0f3F800000;
	sub.f32 	%f114, %f95, %f106;
	sub.f32 	%f115, %f114, %f109;
	sub.f32 	%f116, %f115, %f113;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd7, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f117, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd7, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f118, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd7, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f119, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd7, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f120, %r29;
	mul.f32 	%f121, %f94, %f118;
	fma.rn.f32 	%f122, %f88, %f117, %f121;
	fma.rn.f32 	%f123, %f99, %f119, %f122;
	fma.rn.f32 	%f124, %f102, %f120, %f123;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd7, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f125, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd7, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f126, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd7, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f127, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd7, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f128, %r45;
	mul.f32 	%f129, %f94, %f126;
	fma.rn.f32 	%f130, %f88, %f125, %f129;
	fma.rn.f32 	%f131, %f99, %f127, %f130;
	fma.rn.f32 	%f132, %f102, %f128, %f131;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd7, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f133, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd7, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f134, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd7, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f135, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd7, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f136, %r61;
	mul.f32 	%f137, %f94, %f134;
	fma.rn.f32 	%f138, %f88, %f133, %f137;
	fma.rn.f32 	%f139, %f99, %f135, %f138;
	fma.rn.f32 	%f140, %f102, %f136, %f139;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd7, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f141, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd7, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f142, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd7, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f143, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd7, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f144, %r77;
	mul.f32 	%f145, %f94, %f142;
	fma.rn.f32 	%f146, %f88, %f141, %f145;
	fma.rn.f32 	%f147, %f99, %f143, %f146;
	fma.rn.f32 	%f148, %f102, %f144, %f147;
	mul.f32 	%f149, %f109, %f132;
	fma.rn.f32 	%f150, %f106, %f124, %f149;
	fma.rn.f32 	%f151, %f113, %f140, %f150;
	fma.rn.f32 	%f152, %f116, %f148, %f151;
	mul.f32 	%f153, %f152, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f153;
	cvt.s64.s32 	%rd39, %r2;
	cvt.s64.s32 	%rd40, %r5;
	shr.u64 	%rd41, %rd40, 1;
	mul.lo.s64 	%rd42, %rd41, %rd39;
	cvt.s64.s32 	%rd43, %r1;
	add.s64 	%rd44, %rd42, %rd43;
	shl.b64 	%rd45, %rd44, 1;
	add.s64 	%rd46, %rd2, %rd45;
	st.global.u16 	[%rd46], %rs1;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd23, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f154, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd23, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f155, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd23, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f156, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd23, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f157, %r93;
	mul.f32 	%f158, %f94, %f155;
	fma.rn.f32 	%f159, %f88, %f154, %f158;
	fma.rn.f32 	%f160, %f99, %f156, %f159;
	fma.rn.f32 	%f161, %f102, %f157, %f160;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd23, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f162, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd23, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f163, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd23, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f164, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd23, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f165, %r109;
	mul.f32 	%f166, %f94, %f163;
	fma.rn.f32 	%f167, %f88, %f162, %f166;
	fma.rn.f32 	%f168, %f99, %f164, %f167;
	fma.rn.f32 	%f169, %f102, %f165, %f168;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd23, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f170, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd23, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f171, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd23, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f172, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd23, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f173, %r125;
	mul.f32 	%f174, %f94, %f171;
	fma.rn.f32 	%f175, %f88, %f170, %f174;
	fma.rn.f32 	%f176, %f99, %f172, %f175;
	fma.rn.f32 	%f177, %f102, %f173, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd23, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f178, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd23, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f179, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd23, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f180, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd23, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f181, %r141;
	mul.f32 	%f182, %f94, %f179;
	fma.rn.f32 	%f183, %f88, %f178, %f182;
	fma.rn.f32 	%f184, %f99, %f180, %f183;
	fma.rn.f32 	%f185, %f102, %f181, %f184;
	mul.f32 	%f186, %f109, %f169;
	fma.rn.f32 	%f187, %f106, %f161, %f186;
	fma.rn.f32 	%f188, %f113, %f177, %f187;
	fma.rn.f32 	%f189, %f116, %f185, %f188;
	mul.f32 	%f190, %f189, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs2, %f190;
	add.s64 	%rd47, %rd1, %rd45;
	st.global.u16 	[%rd47], %rs2;
$L__BB231_2:
	ret;

}
	// .globl	Subsample_Bicubic_bgr0_bgr0
.visible .entry Subsample_Bicubic_bgr0_bgr0(
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_0,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_1,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_2,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_3,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_4,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_5,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_6,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_param_7,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_param_8,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_param_9,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_param_10,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_param_11,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_param_12,
	.param .f32 Subsample_Bicubic_bgr0_bgr0_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<233>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_bgr0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_bgr0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB232_2;
	bra.uni 	$L__BB232_1;
$L__BB232_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_bgr0_bgr0_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_bgr0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_bgr0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_bgr0_bgr0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_bgr0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_bgr0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r20;
	mov.b32 	%f86, %r19;
	mov.b32 	%f87, %r18;
	mov.b32 	%f88, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f89, %r24;
	mov.b32 	%f90, %r23;
	mov.b32 	%f91, %r22;
	mov.b32 	%f92, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f93, %r28;
	mov.b32 	%f94, %r27;
	mov.b32 	%f95, %r26;
	mov.b32 	%f96, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f97, %r32;
	mov.b32 	%f98, %r31;
	mov.b32 	%f99, %r30;
	mov.b32 	%f100, %r29;
	mul.f32 	%f101, %f62, %f92;
	mul.f32 	%f102, %f62, %f91;
	mul.f32 	%f103, %f62, %f90;
	mul.f32 	%f104, %f62, %f89;
	fma.rn.f32 	%f105, %f56, %f88, %f101;
	fma.rn.f32 	%f106, %f56, %f87, %f102;
	fma.rn.f32 	%f107, %f56, %f86, %f103;
	fma.rn.f32 	%f108, %f56, %f85, %f104;
	fma.rn.f32 	%f109, %f67, %f96, %f105;
	fma.rn.f32 	%f110, %f67, %f95, %f106;
	fma.rn.f32 	%f111, %f67, %f94, %f107;
	fma.rn.f32 	%f112, %f67, %f93, %f108;
	fma.rn.f32 	%f113, %f70, %f100, %f109;
	fma.rn.f32 	%f114, %f70, %f99, %f110;
	fma.rn.f32 	%f115, %f70, %f98, %f111;
	fma.rn.f32 	%f116, %f70, %f97, %f112;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f117, %r36;
	mov.b32 	%f118, %r35;
	mov.b32 	%f119, %r34;
	mov.b32 	%f120, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f121, %r40;
	mov.b32 	%f122, %r39;
	mov.b32 	%f123, %r38;
	mov.b32 	%f124, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f125, %r44;
	mov.b32 	%f126, %r43;
	mov.b32 	%f127, %r42;
	mov.b32 	%f128, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f129, %r48;
	mov.b32 	%f130, %r47;
	mov.b32 	%f131, %r46;
	mov.b32 	%f132, %r45;
	mul.f32 	%f133, %f62, %f124;
	mul.f32 	%f134, %f62, %f123;
	mul.f32 	%f135, %f62, %f122;
	mul.f32 	%f136, %f62, %f121;
	fma.rn.f32 	%f137, %f56, %f120, %f133;
	fma.rn.f32 	%f138, %f56, %f119, %f134;
	fma.rn.f32 	%f139, %f56, %f118, %f135;
	fma.rn.f32 	%f140, %f56, %f117, %f136;
	fma.rn.f32 	%f141, %f67, %f128, %f137;
	fma.rn.f32 	%f142, %f67, %f127, %f138;
	fma.rn.f32 	%f143, %f67, %f126, %f139;
	fma.rn.f32 	%f144, %f67, %f125, %f140;
	fma.rn.f32 	%f145, %f70, %f132, %f141;
	fma.rn.f32 	%f146, %f70, %f131, %f142;
	fma.rn.f32 	%f147, %f70, %f130, %f143;
	fma.rn.f32 	%f148, %f70, %f129, %f144;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f149, %r52;
	mov.b32 	%f150, %r51;
	mov.b32 	%f151, %r50;
	mov.b32 	%f152, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f153, %r56;
	mov.b32 	%f154, %r55;
	mov.b32 	%f155, %r54;
	mov.b32 	%f156, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f157, %r60;
	mov.b32 	%f158, %r59;
	mov.b32 	%f159, %r58;
	mov.b32 	%f160, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f161, %r64;
	mov.b32 	%f162, %r63;
	mov.b32 	%f163, %r62;
	mov.b32 	%f164, %r61;
	mul.f32 	%f165, %f62, %f156;
	mul.f32 	%f166, %f62, %f155;
	mul.f32 	%f167, %f62, %f154;
	mul.f32 	%f168, %f62, %f153;
	fma.rn.f32 	%f169, %f56, %f152, %f165;
	fma.rn.f32 	%f170, %f56, %f151, %f166;
	fma.rn.f32 	%f171, %f56, %f150, %f167;
	fma.rn.f32 	%f172, %f56, %f149, %f168;
	fma.rn.f32 	%f173, %f67, %f160, %f169;
	fma.rn.f32 	%f174, %f67, %f159, %f170;
	fma.rn.f32 	%f175, %f67, %f158, %f171;
	fma.rn.f32 	%f176, %f67, %f157, %f172;
	fma.rn.f32 	%f177, %f70, %f164, %f173;
	fma.rn.f32 	%f178, %f70, %f163, %f174;
	fma.rn.f32 	%f179, %f70, %f162, %f175;
	fma.rn.f32 	%f180, %f70, %f161, %f176;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f181, %r68;
	mov.b32 	%f182, %r67;
	mov.b32 	%f183, %r66;
	mov.b32 	%f184, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f185, %r72;
	mov.b32 	%f186, %r71;
	mov.b32 	%f187, %r70;
	mov.b32 	%f188, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f189, %r76;
	mov.b32 	%f190, %r75;
	mov.b32 	%f191, %r74;
	mov.b32 	%f192, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f193, %r80;
	mov.b32 	%f194, %r79;
	mov.b32 	%f195, %r78;
	mov.b32 	%f196, %r77;
	mul.f32 	%f197, %f62, %f188;
	mul.f32 	%f198, %f62, %f187;
	mul.f32 	%f199, %f62, %f186;
	mul.f32 	%f200, %f62, %f185;
	fma.rn.f32 	%f201, %f56, %f184, %f197;
	fma.rn.f32 	%f202, %f56, %f183, %f198;
	fma.rn.f32 	%f203, %f56, %f182, %f199;
	fma.rn.f32 	%f204, %f56, %f181, %f200;
	fma.rn.f32 	%f205, %f67, %f192, %f201;
	fma.rn.f32 	%f206, %f67, %f191, %f202;
	fma.rn.f32 	%f207, %f67, %f190, %f203;
	fma.rn.f32 	%f208, %f67, %f189, %f204;
	fma.rn.f32 	%f209, %f70, %f196, %f205;
	fma.rn.f32 	%f210, %f70, %f195, %f206;
	fma.rn.f32 	%f211, %f70, %f194, %f207;
	fma.rn.f32 	%f212, %f70, %f193, %f208;
	mul.f32 	%f213, %f77, %f145;
	mul.f32 	%f214, %f77, %f146;
	mul.f32 	%f215, %f77, %f147;
	mul.f32 	%f216, %f77, %f148;
	fma.rn.f32 	%f217, %f74, %f113, %f213;
	fma.rn.f32 	%f218, %f74, %f114, %f214;
	fma.rn.f32 	%f219, %f74, %f115, %f215;
	fma.rn.f32 	%f220, %f74, %f116, %f216;
	fma.rn.f32 	%f221, %f81, %f177, %f217;
	fma.rn.f32 	%f222, %f81, %f178, %f218;
	fma.rn.f32 	%f223, %f81, %f179, %f219;
	fma.rn.f32 	%f224, %f81, %f180, %f220;
	fma.rn.f32 	%f225, %f84, %f209, %f221;
	fma.rn.f32 	%f226, %f84, %f210, %f222;
	fma.rn.f32 	%f227, %f84, %f211, %f223;
	fma.rn.f32 	%f228, %f84, %f212, %f224;
	mul.f32 	%f229, %f225, 0f437F0000;
	mul.f32 	%f230, %f226, 0f437F0000;
	mul.f32 	%f231, %f227, 0f437F0000;
	mul.f32 	%f232, %f228, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f229;
	cvt.rzi.u16.f32 	%rs2, %f230;
	cvt.rzi.u16.f32 	%rs3, %f231;
	cvt.rzi.u16.f32 	%rs4, %f232;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v4.u8 	[%rd27], {%rs1, %rs2, %rs3, %rs4};
$L__BB232_2:
	ret;

}
	// .globl	Subsample_Bicubic_bgr0_bgr0_uv
.visible .entry Subsample_Bicubic_bgr0_bgr0_uv(
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_0,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_1,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_2,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_3,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_4,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_5,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_6,
	.param .u64 Subsample_Bicubic_bgr0_bgr0_uv_param_7,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_uv_param_8,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_uv_param_9,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_uv_param_10,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_uv_param_11,
	.param .u32 Subsample_Bicubic_bgr0_bgr0_uv_param_12,
	.param .f32 Subsample_Bicubic_bgr0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bicubic_rgb0_rgb0
.visible .entry Subsample_Bicubic_rgb0_rgb0(
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_0,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_1,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_2,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_3,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_4,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_5,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_6,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_param_7,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_param_8,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_param_9,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_param_10,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_param_11,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_param_12,
	.param .f32 Subsample_Bicubic_rgb0_rgb0_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<233>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_rgb0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_rgb0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB234_2;
	bra.uni 	$L__BB234_1;
$L__BB234_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_rgb0_rgb0_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_rgb0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_rgb0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_rgb0_rgb0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_rgb0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_rgb0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r20;
	mov.b32 	%f86, %r19;
	mov.b32 	%f87, %r18;
	mov.b32 	%f88, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f89, %r24;
	mov.b32 	%f90, %r23;
	mov.b32 	%f91, %r22;
	mov.b32 	%f92, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f93, %r28;
	mov.b32 	%f94, %r27;
	mov.b32 	%f95, %r26;
	mov.b32 	%f96, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f97, %r32;
	mov.b32 	%f98, %r31;
	mov.b32 	%f99, %r30;
	mov.b32 	%f100, %r29;
	mul.f32 	%f101, %f62, %f92;
	mul.f32 	%f102, %f62, %f91;
	mul.f32 	%f103, %f62, %f90;
	mul.f32 	%f104, %f62, %f89;
	fma.rn.f32 	%f105, %f56, %f88, %f101;
	fma.rn.f32 	%f106, %f56, %f87, %f102;
	fma.rn.f32 	%f107, %f56, %f86, %f103;
	fma.rn.f32 	%f108, %f56, %f85, %f104;
	fma.rn.f32 	%f109, %f67, %f96, %f105;
	fma.rn.f32 	%f110, %f67, %f95, %f106;
	fma.rn.f32 	%f111, %f67, %f94, %f107;
	fma.rn.f32 	%f112, %f67, %f93, %f108;
	fma.rn.f32 	%f113, %f70, %f100, %f109;
	fma.rn.f32 	%f114, %f70, %f99, %f110;
	fma.rn.f32 	%f115, %f70, %f98, %f111;
	fma.rn.f32 	%f116, %f70, %f97, %f112;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f117, %r36;
	mov.b32 	%f118, %r35;
	mov.b32 	%f119, %r34;
	mov.b32 	%f120, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f121, %r40;
	mov.b32 	%f122, %r39;
	mov.b32 	%f123, %r38;
	mov.b32 	%f124, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f125, %r44;
	mov.b32 	%f126, %r43;
	mov.b32 	%f127, %r42;
	mov.b32 	%f128, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f129, %r48;
	mov.b32 	%f130, %r47;
	mov.b32 	%f131, %r46;
	mov.b32 	%f132, %r45;
	mul.f32 	%f133, %f62, %f124;
	mul.f32 	%f134, %f62, %f123;
	mul.f32 	%f135, %f62, %f122;
	mul.f32 	%f136, %f62, %f121;
	fma.rn.f32 	%f137, %f56, %f120, %f133;
	fma.rn.f32 	%f138, %f56, %f119, %f134;
	fma.rn.f32 	%f139, %f56, %f118, %f135;
	fma.rn.f32 	%f140, %f56, %f117, %f136;
	fma.rn.f32 	%f141, %f67, %f128, %f137;
	fma.rn.f32 	%f142, %f67, %f127, %f138;
	fma.rn.f32 	%f143, %f67, %f126, %f139;
	fma.rn.f32 	%f144, %f67, %f125, %f140;
	fma.rn.f32 	%f145, %f70, %f132, %f141;
	fma.rn.f32 	%f146, %f70, %f131, %f142;
	fma.rn.f32 	%f147, %f70, %f130, %f143;
	fma.rn.f32 	%f148, %f70, %f129, %f144;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f149, %r52;
	mov.b32 	%f150, %r51;
	mov.b32 	%f151, %r50;
	mov.b32 	%f152, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f153, %r56;
	mov.b32 	%f154, %r55;
	mov.b32 	%f155, %r54;
	mov.b32 	%f156, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f157, %r60;
	mov.b32 	%f158, %r59;
	mov.b32 	%f159, %r58;
	mov.b32 	%f160, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f161, %r64;
	mov.b32 	%f162, %r63;
	mov.b32 	%f163, %r62;
	mov.b32 	%f164, %r61;
	mul.f32 	%f165, %f62, %f156;
	mul.f32 	%f166, %f62, %f155;
	mul.f32 	%f167, %f62, %f154;
	mul.f32 	%f168, %f62, %f153;
	fma.rn.f32 	%f169, %f56, %f152, %f165;
	fma.rn.f32 	%f170, %f56, %f151, %f166;
	fma.rn.f32 	%f171, %f56, %f150, %f167;
	fma.rn.f32 	%f172, %f56, %f149, %f168;
	fma.rn.f32 	%f173, %f67, %f160, %f169;
	fma.rn.f32 	%f174, %f67, %f159, %f170;
	fma.rn.f32 	%f175, %f67, %f158, %f171;
	fma.rn.f32 	%f176, %f67, %f157, %f172;
	fma.rn.f32 	%f177, %f70, %f164, %f173;
	fma.rn.f32 	%f178, %f70, %f163, %f174;
	fma.rn.f32 	%f179, %f70, %f162, %f175;
	fma.rn.f32 	%f180, %f70, %f161, %f176;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f181, %r68;
	mov.b32 	%f182, %r67;
	mov.b32 	%f183, %r66;
	mov.b32 	%f184, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f185, %r72;
	mov.b32 	%f186, %r71;
	mov.b32 	%f187, %r70;
	mov.b32 	%f188, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f189, %r76;
	mov.b32 	%f190, %r75;
	mov.b32 	%f191, %r74;
	mov.b32 	%f192, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f193, %r80;
	mov.b32 	%f194, %r79;
	mov.b32 	%f195, %r78;
	mov.b32 	%f196, %r77;
	mul.f32 	%f197, %f62, %f188;
	mul.f32 	%f198, %f62, %f187;
	mul.f32 	%f199, %f62, %f186;
	mul.f32 	%f200, %f62, %f185;
	fma.rn.f32 	%f201, %f56, %f184, %f197;
	fma.rn.f32 	%f202, %f56, %f183, %f198;
	fma.rn.f32 	%f203, %f56, %f182, %f199;
	fma.rn.f32 	%f204, %f56, %f181, %f200;
	fma.rn.f32 	%f205, %f67, %f192, %f201;
	fma.rn.f32 	%f206, %f67, %f191, %f202;
	fma.rn.f32 	%f207, %f67, %f190, %f203;
	fma.rn.f32 	%f208, %f67, %f189, %f204;
	fma.rn.f32 	%f209, %f70, %f196, %f205;
	fma.rn.f32 	%f210, %f70, %f195, %f206;
	fma.rn.f32 	%f211, %f70, %f194, %f207;
	fma.rn.f32 	%f212, %f70, %f193, %f208;
	mul.f32 	%f213, %f77, %f145;
	mul.f32 	%f214, %f77, %f146;
	mul.f32 	%f215, %f77, %f147;
	mul.f32 	%f216, %f77, %f148;
	fma.rn.f32 	%f217, %f74, %f113, %f213;
	fma.rn.f32 	%f218, %f74, %f114, %f214;
	fma.rn.f32 	%f219, %f74, %f115, %f215;
	fma.rn.f32 	%f220, %f74, %f116, %f216;
	fma.rn.f32 	%f221, %f81, %f177, %f217;
	fma.rn.f32 	%f222, %f81, %f178, %f218;
	fma.rn.f32 	%f223, %f81, %f179, %f219;
	fma.rn.f32 	%f224, %f81, %f180, %f220;
	fma.rn.f32 	%f225, %f84, %f209, %f221;
	fma.rn.f32 	%f226, %f84, %f210, %f222;
	fma.rn.f32 	%f227, %f84, %f211, %f223;
	fma.rn.f32 	%f228, %f84, %f212, %f224;
	mul.f32 	%f229, %f225, 0f437F0000;
	mul.f32 	%f230, %f226, 0f437F0000;
	mul.f32 	%f231, %f227, 0f437F0000;
	mul.f32 	%f232, %f228, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f229;
	cvt.rzi.u16.f32 	%rs2, %f230;
	cvt.rzi.u16.f32 	%rs3, %f231;
	cvt.rzi.u16.f32 	%rs4, %f232;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v4.u8 	[%rd27], {%rs1, %rs2, %rs3, %rs4};
$L__BB234_2:
	ret;

}
	// .globl	Subsample_Bicubic_rgb0_rgb0_uv
.visible .entry Subsample_Bicubic_rgb0_rgb0_uv(
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_0,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_1,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_2,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_3,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_4,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_5,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_6,
	.param .u64 Subsample_Bicubic_rgb0_rgb0_uv_param_7,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_uv_param_8,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_uv_param_9,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_uv_param_10,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_uv_param_11,
	.param .u32 Subsample_Bicubic_rgb0_rgb0_uv_param_12,
	.param .f32 Subsample_Bicubic_rgb0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bicubic_bgr0_rgb0
.visible .entry Subsample_Bicubic_bgr0_rgb0(
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_0,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_1,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_2,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_3,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_4,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_5,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_6,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_param_7,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_param_8,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_param_9,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_param_10,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_param_11,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_param_12,
	.param .f32 Subsample_Bicubic_bgr0_rgb0_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<233>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_bgr0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_bgr0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB236_2;
	bra.uni 	$L__BB236_1;
$L__BB236_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_bgr0_rgb0_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_bgr0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_bgr0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_bgr0_rgb0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_bgr0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_bgr0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r20;
	mov.b32 	%f86, %r19;
	mov.b32 	%f87, %r18;
	mov.b32 	%f88, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f89, %r24;
	mov.b32 	%f90, %r23;
	mov.b32 	%f91, %r22;
	mov.b32 	%f92, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f93, %r28;
	mov.b32 	%f94, %r27;
	mov.b32 	%f95, %r26;
	mov.b32 	%f96, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f97, %r32;
	mov.b32 	%f98, %r31;
	mov.b32 	%f99, %r30;
	mov.b32 	%f100, %r29;
	mul.f32 	%f101, %f62, %f92;
	mul.f32 	%f102, %f62, %f91;
	mul.f32 	%f103, %f62, %f90;
	mul.f32 	%f104, %f62, %f89;
	fma.rn.f32 	%f105, %f56, %f88, %f101;
	fma.rn.f32 	%f106, %f56, %f87, %f102;
	fma.rn.f32 	%f107, %f56, %f86, %f103;
	fma.rn.f32 	%f108, %f56, %f85, %f104;
	fma.rn.f32 	%f109, %f67, %f96, %f105;
	fma.rn.f32 	%f110, %f67, %f95, %f106;
	fma.rn.f32 	%f111, %f67, %f94, %f107;
	fma.rn.f32 	%f112, %f67, %f93, %f108;
	fma.rn.f32 	%f113, %f70, %f100, %f109;
	fma.rn.f32 	%f114, %f70, %f99, %f110;
	fma.rn.f32 	%f115, %f70, %f98, %f111;
	fma.rn.f32 	%f116, %f70, %f97, %f112;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f117, %r36;
	mov.b32 	%f118, %r35;
	mov.b32 	%f119, %r34;
	mov.b32 	%f120, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f121, %r40;
	mov.b32 	%f122, %r39;
	mov.b32 	%f123, %r38;
	mov.b32 	%f124, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f125, %r44;
	mov.b32 	%f126, %r43;
	mov.b32 	%f127, %r42;
	mov.b32 	%f128, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f129, %r48;
	mov.b32 	%f130, %r47;
	mov.b32 	%f131, %r46;
	mov.b32 	%f132, %r45;
	mul.f32 	%f133, %f62, %f124;
	mul.f32 	%f134, %f62, %f123;
	mul.f32 	%f135, %f62, %f122;
	mul.f32 	%f136, %f62, %f121;
	fma.rn.f32 	%f137, %f56, %f120, %f133;
	fma.rn.f32 	%f138, %f56, %f119, %f134;
	fma.rn.f32 	%f139, %f56, %f118, %f135;
	fma.rn.f32 	%f140, %f56, %f117, %f136;
	fma.rn.f32 	%f141, %f67, %f128, %f137;
	fma.rn.f32 	%f142, %f67, %f127, %f138;
	fma.rn.f32 	%f143, %f67, %f126, %f139;
	fma.rn.f32 	%f144, %f67, %f125, %f140;
	fma.rn.f32 	%f145, %f70, %f132, %f141;
	fma.rn.f32 	%f146, %f70, %f131, %f142;
	fma.rn.f32 	%f147, %f70, %f130, %f143;
	fma.rn.f32 	%f148, %f70, %f129, %f144;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f149, %r52;
	mov.b32 	%f150, %r51;
	mov.b32 	%f151, %r50;
	mov.b32 	%f152, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f153, %r56;
	mov.b32 	%f154, %r55;
	mov.b32 	%f155, %r54;
	mov.b32 	%f156, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f157, %r60;
	mov.b32 	%f158, %r59;
	mov.b32 	%f159, %r58;
	mov.b32 	%f160, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f161, %r64;
	mov.b32 	%f162, %r63;
	mov.b32 	%f163, %r62;
	mov.b32 	%f164, %r61;
	mul.f32 	%f165, %f62, %f156;
	mul.f32 	%f166, %f62, %f155;
	mul.f32 	%f167, %f62, %f154;
	mul.f32 	%f168, %f62, %f153;
	fma.rn.f32 	%f169, %f56, %f152, %f165;
	fma.rn.f32 	%f170, %f56, %f151, %f166;
	fma.rn.f32 	%f171, %f56, %f150, %f167;
	fma.rn.f32 	%f172, %f56, %f149, %f168;
	fma.rn.f32 	%f173, %f67, %f160, %f169;
	fma.rn.f32 	%f174, %f67, %f159, %f170;
	fma.rn.f32 	%f175, %f67, %f158, %f171;
	fma.rn.f32 	%f176, %f67, %f157, %f172;
	fma.rn.f32 	%f177, %f70, %f164, %f173;
	fma.rn.f32 	%f178, %f70, %f163, %f174;
	fma.rn.f32 	%f179, %f70, %f162, %f175;
	fma.rn.f32 	%f180, %f70, %f161, %f176;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f181, %r68;
	mov.b32 	%f182, %r67;
	mov.b32 	%f183, %r66;
	mov.b32 	%f184, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f185, %r72;
	mov.b32 	%f186, %r71;
	mov.b32 	%f187, %r70;
	mov.b32 	%f188, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f189, %r76;
	mov.b32 	%f190, %r75;
	mov.b32 	%f191, %r74;
	mov.b32 	%f192, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f193, %r80;
	mov.b32 	%f194, %r79;
	mov.b32 	%f195, %r78;
	mov.b32 	%f196, %r77;
	mul.f32 	%f197, %f62, %f188;
	mul.f32 	%f198, %f62, %f187;
	mul.f32 	%f199, %f62, %f186;
	mul.f32 	%f200, %f62, %f185;
	fma.rn.f32 	%f201, %f56, %f184, %f197;
	fma.rn.f32 	%f202, %f56, %f183, %f198;
	fma.rn.f32 	%f203, %f56, %f182, %f199;
	fma.rn.f32 	%f204, %f56, %f181, %f200;
	fma.rn.f32 	%f205, %f67, %f192, %f201;
	fma.rn.f32 	%f206, %f67, %f191, %f202;
	fma.rn.f32 	%f207, %f67, %f190, %f203;
	fma.rn.f32 	%f208, %f67, %f189, %f204;
	fma.rn.f32 	%f209, %f70, %f196, %f205;
	fma.rn.f32 	%f210, %f70, %f195, %f206;
	fma.rn.f32 	%f211, %f70, %f194, %f207;
	fma.rn.f32 	%f212, %f70, %f193, %f208;
	mul.f32 	%f213, %f77, %f145;
	mul.f32 	%f214, %f77, %f146;
	mul.f32 	%f215, %f77, %f147;
	mul.f32 	%f216, %f77, %f148;
	fma.rn.f32 	%f217, %f74, %f113, %f213;
	fma.rn.f32 	%f218, %f74, %f114, %f214;
	fma.rn.f32 	%f219, %f74, %f115, %f215;
	fma.rn.f32 	%f220, %f74, %f116, %f216;
	fma.rn.f32 	%f221, %f81, %f177, %f217;
	fma.rn.f32 	%f222, %f81, %f178, %f218;
	fma.rn.f32 	%f223, %f81, %f179, %f219;
	fma.rn.f32 	%f224, %f81, %f180, %f220;
	fma.rn.f32 	%f225, %f84, %f209, %f221;
	fma.rn.f32 	%f226, %f84, %f210, %f222;
	fma.rn.f32 	%f227, %f84, %f211, %f223;
	fma.rn.f32 	%f228, %f84, %f212, %f224;
	mul.f32 	%f229, %f225, 0f437F0000;
	mul.f32 	%f230, %f226, 0f437F0000;
	mul.f32 	%f231, %f227, 0f437F0000;
	mul.f32 	%f232, %f228, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f229;
	cvt.rzi.u16.f32 	%rs2, %f230;
	cvt.rzi.u16.f32 	%rs3, %f231;
	cvt.rzi.u16.f32 	%rs4, %f232;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v4.u8 	[%rd27], {%rs3, %rs2, %rs1, %rs4};
$L__BB236_2:
	ret;

}
	// .globl	Subsample_Bicubic_bgr0_rgb0_uv
.visible .entry Subsample_Bicubic_bgr0_rgb0_uv(
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_0,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_1,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_2,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_3,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_4,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_5,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_6,
	.param .u64 Subsample_Bicubic_bgr0_rgb0_uv_param_7,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_uv_param_8,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_uv_param_9,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_uv_param_10,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_uv_param_11,
	.param .u32 Subsample_Bicubic_bgr0_rgb0_uv_param_12,
	.param .f32 Subsample_Bicubic_bgr0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Bicubic_rgb0_bgr0
.visible .entry Subsample_Bicubic_rgb0_bgr0(
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_0,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_1,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_2,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_3,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_4,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_5,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_6,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_param_7,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_param_8,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_param_9,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_param_10,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_param_11,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_param_12,
	.param .f32 Subsample_Bicubic_rgb0_bgr0_param_13
)
{
	.reg .pred 	%p<5>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<233>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Bicubic_rgb0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Bicubic_rgb0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB238_2;
	bra.uni 	$L__BB238_1;
$L__BB238_1:
	ld.param.f32 	%f1, [Subsample_Bicubic_rgb0_bgr0_param_13];
	ld.param.u32 	%r7, [Subsample_Bicubic_rgb0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Bicubic_rgb0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Bicubic_rgb0_bgr0_param_10];
	ld.param.u64 	%rd4, [Subsample_Bicubic_rgb0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Bicubic_rgb0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	cvt.rn.f32.s32 	%f34, %r6;
	cvt.rn.f32.s32 	%f35, %r3;
	div.rn.f32 	%f36, %f34, %f35;
	cvt.rn.f32.s32 	%f37, %r7;
	cvt.rn.f32.s32 	%f38, %r4;
	div.rn.f32 	%f39, %f37, %f38;
	cvt.rn.f32.s32 	%f40, %r1;
	add.f32 	%f41, %f40, 0f3F000000;
	fma.rn.f32 	%f42, %f36, %f41, 0fBF000000;
	cvt.rn.f32.s32 	%f43, %r2;
	add.f32 	%f44, %f43, 0f3F000000;
	fma.rn.f32 	%f45, %f39, %f44, 0fBF000000;
	cvt.rmi.f32.f32 	%f4, %f42;
	cvt.rmi.f32.f32 	%f11, %f45;
	sub.f32 	%f46, %f42, %f4;
	sub.f32 	%f47, %f45, %f11;
	setp.eq.f32 	%p4, %f1, 0f497423F0;
	neg.f32 	%f48, %f1;
	selp.f32 	%f49, 0f00000000, %f48, %p4;
	add.f32 	%f50, %f46, 0f3F800000;
	mul.f32 	%f51, %f49, 0fC0A00000;
	fma.rn.f32 	%f52, %f49, %f50, %f51;
	mul.f32 	%f53, %f49, 0f41000000;
	fma.rn.f32 	%f54, %f50, %f52, %f53;
	mul.f32 	%f55, %f49, 0fC0800000;
	fma.rn.f32 	%f56, %f50, %f54, %f55;
	add.f32 	%f57, %f49, 0f40000000;
	add.f32 	%f58, %f49, 0f40400000;
	neg.f32 	%f59, %f58;
	fma.rn.f32 	%f60, %f57, %f46, %f59;
	mul.f32 	%f61, %f46, %f60;
	fma.rn.f32 	%f62, %f46, %f61, 0f3F800000;
	mov.f32 	%f63, 0f3F800000;
	sub.f32 	%f64, %f63, %f46;
	fma.rn.f32 	%f65, %f57, %f64, %f59;
	mul.f32 	%f66, %f64, %f65;
	fma.rn.f32 	%f67, %f64, %f66, 0f3F800000;
	sub.f32 	%f68, %f63, %f56;
	sub.f32 	%f69, %f68, %f62;
	sub.f32 	%f70, %f69, %f67;
	add.f32 	%f71, %f47, 0f3F800000;
	fma.rn.f32 	%f72, %f49, %f71, %f51;
	fma.rn.f32 	%f73, %f71, %f72, %f53;
	fma.rn.f32 	%f74, %f71, %f73, %f55;
	fma.rn.f32 	%f75, %f57, %f47, %f59;
	mul.f32 	%f76, %f47, %f75;
	fma.rn.f32 	%f77, %f47, %f76, 0f3F800000;
	sub.f32 	%f78, %f63, %f47;
	fma.rn.f32 	%f79, %f57, %f78, %f59;
	mul.f32 	%f80, %f78, %f79;
	fma.rn.f32 	%f81, %f78, %f80, 0f3F800000;
	sub.f32 	%f82, %f63, %f74;
	sub.f32 	%f83, %f82, %f77;
	sub.f32 	%f84, %f83, %f81;
	add.f32 	%f2, %f4, 0fBF800000;
	add.f32 	%f3, %f11, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f2, %f3}];
	// end inline asm
	mov.b32 	%f85, %r20;
	mov.b32 	%f86, %r19;
	mov.b32 	%f87, %r18;
	mov.b32 	%f88, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f4, %f3}];
	// end inline asm
	mov.b32 	%f89, %r24;
	mov.b32 	%f90, %r23;
	mov.b32 	%f91, %r22;
	mov.b32 	%f92, %r21;
	add.f32 	%f6, %f4, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f6, %f3}];
	// end inline asm
	mov.b32 	%f93, %r28;
	mov.b32 	%f94, %r27;
	mov.b32 	%f95, %r26;
	mov.b32 	%f96, %r25;
	add.f32 	%f8, %f4, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f8, %f3}];
	// end inline asm
	mov.b32 	%f97, %r32;
	mov.b32 	%f98, %r31;
	mov.b32 	%f99, %r30;
	mov.b32 	%f100, %r29;
	mul.f32 	%f101, %f62, %f92;
	mul.f32 	%f102, %f62, %f91;
	mul.f32 	%f103, %f62, %f90;
	mul.f32 	%f104, %f62, %f89;
	fma.rn.f32 	%f105, %f56, %f88, %f101;
	fma.rn.f32 	%f106, %f56, %f87, %f102;
	fma.rn.f32 	%f107, %f56, %f86, %f103;
	fma.rn.f32 	%f108, %f56, %f85, %f104;
	fma.rn.f32 	%f109, %f67, %f96, %f105;
	fma.rn.f32 	%f110, %f67, %f95, %f106;
	fma.rn.f32 	%f111, %f67, %f94, %f107;
	fma.rn.f32 	%f112, %f67, %f93, %f108;
	fma.rn.f32 	%f113, %f70, %f100, %f109;
	fma.rn.f32 	%f114, %f70, %f99, %f110;
	fma.rn.f32 	%f115, %f70, %f98, %f111;
	fma.rn.f32 	%f116, %f70, %f97, %f112;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f2, %f11}];
	// end inline asm
	mov.b32 	%f117, %r36;
	mov.b32 	%f118, %r35;
	mov.b32 	%f119, %r34;
	mov.b32 	%f120, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f4, %f11}];
	// end inline asm
	mov.b32 	%f121, %r40;
	mov.b32 	%f122, %r39;
	mov.b32 	%f123, %r38;
	mov.b32 	%f124, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f6, %f11}];
	// end inline asm
	mov.b32 	%f125, %r44;
	mov.b32 	%f126, %r43;
	mov.b32 	%f127, %r42;
	mov.b32 	%f128, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f8, %f11}];
	// end inline asm
	mov.b32 	%f129, %r48;
	mov.b32 	%f130, %r47;
	mov.b32 	%f131, %r46;
	mov.b32 	%f132, %r45;
	mul.f32 	%f133, %f62, %f124;
	mul.f32 	%f134, %f62, %f123;
	mul.f32 	%f135, %f62, %f122;
	mul.f32 	%f136, %f62, %f121;
	fma.rn.f32 	%f137, %f56, %f120, %f133;
	fma.rn.f32 	%f138, %f56, %f119, %f134;
	fma.rn.f32 	%f139, %f56, %f118, %f135;
	fma.rn.f32 	%f140, %f56, %f117, %f136;
	fma.rn.f32 	%f141, %f67, %f128, %f137;
	fma.rn.f32 	%f142, %f67, %f127, %f138;
	fma.rn.f32 	%f143, %f67, %f126, %f139;
	fma.rn.f32 	%f144, %f67, %f125, %f140;
	fma.rn.f32 	%f145, %f70, %f132, %f141;
	fma.rn.f32 	%f146, %f70, %f131, %f142;
	fma.rn.f32 	%f147, %f70, %f130, %f143;
	fma.rn.f32 	%f148, %f70, %f129, %f144;
	add.f32 	%f19, %f11, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f2, %f19}];
	// end inline asm
	mov.b32 	%f149, %r52;
	mov.b32 	%f150, %r51;
	mov.b32 	%f151, %r50;
	mov.b32 	%f152, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f4, %f19}];
	// end inline asm
	mov.b32 	%f153, %r56;
	mov.b32 	%f154, %r55;
	mov.b32 	%f155, %r54;
	mov.b32 	%f156, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f6, %f19}];
	// end inline asm
	mov.b32 	%f157, %r60;
	mov.b32 	%f158, %r59;
	mov.b32 	%f159, %r58;
	mov.b32 	%f160, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f8, %f19}];
	// end inline asm
	mov.b32 	%f161, %r64;
	mov.b32 	%f162, %r63;
	mov.b32 	%f163, %r62;
	mov.b32 	%f164, %r61;
	mul.f32 	%f165, %f62, %f156;
	mul.f32 	%f166, %f62, %f155;
	mul.f32 	%f167, %f62, %f154;
	mul.f32 	%f168, %f62, %f153;
	fma.rn.f32 	%f169, %f56, %f152, %f165;
	fma.rn.f32 	%f170, %f56, %f151, %f166;
	fma.rn.f32 	%f171, %f56, %f150, %f167;
	fma.rn.f32 	%f172, %f56, %f149, %f168;
	fma.rn.f32 	%f173, %f67, %f160, %f169;
	fma.rn.f32 	%f174, %f67, %f159, %f170;
	fma.rn.f32 	%f175, %f67, %f158, %f171;
	fma.rn.f32 	%f176, %f67, %f157, %f172;
	fma.rn.f32 	%f177, %f70, %f164, %f173;
	fma.rn.f32 	%f178, %f70, %f163, %f174;
	fma.rn.f32 	%f179, %f70, %f162, %f175;
	fma.rn.f32 	%f180, %f70, %f161, %f176;
	add.f32 	%f27, %f11, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f2, %f27}];
	// end inline asm
	mov.b32 	%f181, %r68;
	mov.b32 	%f182, %r67;
	mov.b32 	%f183, %r66;
	mov.b32 	%f184, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f4, %f27}];
	// end inline asm
	mov.b32 	%f185, %r72;
	mov.b32 	%f186, %r71;
	mov.b32 	%f187, %r70;
	mov.b32 	%f188, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f6, %f27}];
	// end inline asm
	mov.b32 	%f189, %r76;
	mov.b32 	%f190, %r75;
	mov.b32 	%f191, %r74;
	mov.b32 	%f192, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f8, %f27}];
	// end inline asm
	mov.b32 	%f193, %r80;
	mov.b32 	%f194, %r79;
	mov.b32 	%f195, %r78;
	mov.b32 	%f196, %r77;
	mul.f32 	%f197, %f62, %f188;
	mul.f32 	%f198, %f62, %f187;
	mul.f32 	%f199, %f62, %f186;
	mul.f32 	%f200, %f62, %f185;
	fma.rn.f32 	%f201, %f56, %f184, %f197;
	fma.rn.f32 	%f202, %f56, %f183, %f198;
	fma.rn.f32 	%f203, %f56, %f182, %f199;
	fma.rn.f32 	%f204, %f56, %f181, %f200;
	fma.rn.f32 	%f205, %f67, %f192, %f201;
	fma.rn.f32 	%f206, %f67, %f191, %f202;
	fma.rn.f32 	%f207, %f67, %f190, %f203;
	fma.rn.f32 	%f208, %f67, %f189, %f204;
	fma.rn.f32 	%f209, %f70, %f196, %f205;
	fma.rn.f32 	%f210, %f70, %f195, %f206;
	fma.rn.f32 	%f211, %f70, %f194, %f207;
	fma.rn.f32 	%f212, %f70, %f193, %f208;
	mul.f32 	%f213, %f77, %f145;
	mul.f32 	%f214, %f77, %f146;
	mul.f32 	%f215, %f77, %f147;
	mul.f32 	%f216, %f77, %f148;
	fma.rn.f32 	%f217, %f74, %f113, %f213;
	fma.rn.f32 	%f218, %f74, %f114, %f214;
	fma.rn.f32 	%f219, %f74, %f115, %f215;
	fma.rn.f32 	%f220, %f74, %f116, %f216;
	fma.rn.f32 	%f221, %f81, %f177, %f217;
	fma.rn.f32 	%f222, %f81, %f178, %f218;
	fma.rn.f32 	%f223, %f81, %f179, %f219;
	fma.rn.f32 	%f224, %f81, %f180, %f220;
	fma.rn.f32 	%f225, %f84, %f209, %f221;
	fma.rn.f32 	%f226, %f84, %f210, %f222;
	fma.rn.f32 	%f227, %f84, %f211, %f223;
	fma.rn.f32 	%f228, %f84, %f212, %f224;
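	// Scale the four accumulated channels by 0f437F0000 (255.0), truncate to
	// integers, and store one 4-byte pixel at param_4 + ((param_10 >> 2)*%r2 + %r1)*4.
	// The first three channels are written in reverse order ({%rs3, %rs2, %rs1, %rs4}),
	// consistent with the rgb0 -> bgr0 channel swap in the kernel name.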
	mul.f32 	%f229, %f225, 0f437F0000;
	mul.f32 	%f230, %f226, 0f437F0000;
	mul.f32 	%f231, %f227, 0f437F0000;
	mul.f32 	%f232, %f228, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f229;
	cvt.rzi.u16.f32 	%rs2, %f230;
	cvt.rzi.u16.f32 	%rs3, %f231;
	cvt.rzi.u16.f32 	%rs4, %f232;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v4.u8 	[%rd27], {%rs3, %rs2, %rs1, %rs4};
$L__BB238_2:
	ret;

}
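// The arithmetic in the kernel above corresponds to the classic two-piece
// cubic convolution weights. A CUDA-level sketch reconstructed from the PTX
// (illustrative names; not the original source):
//
//   __device__ static float cubic_weight_near(float A, float t)  // |t| < 1
//   {
//       return ((A + 2.0f) * t - (A + 3.0f)) * t * t + 1.0f;
//   }
//
//   __device__ static float cubic_weight_far(float A, float s)   // 1 <= s < 2
//   {
//       return ((s - 5.0f) * s + 8.0f) * s * A - 4.0f * A;
//   }
//
//   // t = frac(src_x); A = (param == 999999.0f) ? 0.0f : -param;
//   // weight at tap -1: cubic_weight_far(A, t + 1.0f)
//   // weight at tap  0: cubic_weight_near(A, t)
//   // weight at tap +1: cubic_weight_near(A, 1.0f - t)
//   // weight at tap +2: 1.0f minus the other three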
	// .globl	Subsample_Bicubic_rgb0_bgr0_uv
.visible .entry Subsample_Bicubic_rgb0_bgr0_uv(
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_0,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_1,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_2,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_3,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_4,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_5,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_6,
	.param .u64 Subsample_Bicubic_rgb0_bgr0_uv_param_7,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_uv_param_8,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_uv_param_9,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_uv_param_10,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_uv_param_11,
	.param .u32 Subsample_Bicubic_rgb0_bgr0_uv_param_12,
	.param .f32 Subsample_Bicubic_rgb0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
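// Note: this chroma entry point compiles to an empty body (a bare ret),
// presumably because the packed rgb0/bgr0 output carries no separate
// chroma planes for it to fill.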
	// .globl	Subsample_Lanczos_yuv420p_yuv420p
.visible .entry Subsample_Lanczos_yuv420p_yuv420p(
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_yuv420p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB240_18;
	bra.uni 	$L__BB240_1;
$L__BB240_1:
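	// The guarded blocks through $L__BB240_16 compute the source position and
	// the eight Lanczos-2 tap weights: for each tap distance d in
	// {t+1, t, t-1, t-2} (t = fractional x, then the same for y), pi*d is
	// formed and, unless it is exactly zero, the weight is
	// sin(pi*d) * sin(pi*d/2) / ((pi*d)^2 / 2); a zero argument falls through
	// with the default weight 1.0 (%f193).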
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_yuv420p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB240_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB240_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB240_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB240_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB240_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB240_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB240_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB240_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB240_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB240_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB240_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB240_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv420p_yuv420p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB240_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB240_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_yuv420p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB240_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB240_17:
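	// Each group of four weights is normalized by its sum (%f137 for x,
	// %f144 for y), sixteen tex.2d fetches cover the 4x4 neighborhood, and
	// the weighted result is scaled by 255.0, truncated, and stored as one
	// byte at param_4 + %r2*param_10 + %r1.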
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB240_18:
	ret;

}
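// The per-tap weight in the Lanczos kernels above reduces to
// sinc(pi*d) * sinc(pi*d/2) with a zero-distance guard. A CUDA-level sketch
// reconstructed from the PTX (illustrative name; not the original source):
//
//   __device__ static float lanczos2_weight(float d)
//   {
//       float pd = d * 3.14159274f;            // 0f40490FDB
//       if (pd == 0.0f)
//           return 1.0f;
//       return __sinf(pd) * __sinf(pd * 0.5f) / (pd * pd * 0.5f);
//   }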
	// .globl	Subsample_Lanczos_yuv420p_yuv420p_uv
.visible .entry Subsample_Lanczos_yuv420p_yuv420p_uv(
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_yuv420p_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_yuv420p_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB241_34;
	bra.uni 	$L__BB241_1;
$L__BB241_1:
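	// This chroma variant runs the same Lanczos-2 resampling twice: the first
	// pass samples the texture from param_1 (%rd8) and writes one byte to the
	// plane from param_5 (%rd2); the second pass samples param_2 (%rd27) and
	// writes to the plane from param_6 (%rd1), so both chroma planes are
	// handled in a single launch.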
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB241_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB241_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB241_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB241_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB241_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB241_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB241_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB241_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB241_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB241_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB241_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB241_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB241_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB241_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB241_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB241_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f208;
	mul.wide.s32 	%rd24, %r2, %r5;
	cvt.s64.s32 	%rd25, %r1;
	add.s64 	%rd3, %rd24, %rd25;
	add.s64 	%rd26, %rd2, %rd3;
	st.global.u8 	[%rd26], %rs1;
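	// Second pass: the eight tap weights are recomputed into %f331..%f338
	// (reusing the zero-argument predicates %p4..%p11), then applied to the
	// texture from param_2 and stored to the plane from param_6 at the same
	// offset (%rd3).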
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB241_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB241_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB241_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB241_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB241_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB241_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB241_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB241_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB241_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB241_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB241_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB241_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB241_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB241_31:
	ld.param.u64 	%rd27, [Subsample_Lanczos_yuv420p_yuv420p_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB241_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB241_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd27, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd27, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd27, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd27, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd27, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd27, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd27, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd27, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd27, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd27, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd27, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd27, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd27, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd27, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd27, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd27, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f322;
	add.s64 	%rd43, %rd1, %rd3;
	st.global.u8 	[%rd43], %rs2;
$L__BB241_34:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_yuv420p
.visible .entry Subsample_Lanczos_nv12_yuv420p(
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_0,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_1,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_2,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_3,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_4,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_5,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_6,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_param_7,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_param_8,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_param_9,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_param_10,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_param_11,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_param_12,
	.param .f32 Subsample_Lanczos_nv12_yuv420p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB242_18;
	bra.uni 	$L__BB242_1;
$L__BB242_1:
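	// Luma path for the nv12 input: the body appears to match
	// Subsample_Lanczos_yuv420p_yuv420p above instruction for instruction,
	// which is expected since the luma plane is laid out identically in both
	// input formats.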
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_yuv420p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB242_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB242_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB242_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB242_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB242_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB242_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB242_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB242_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB242_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB242_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB242_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB242_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_yuv420p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB242_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB242_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_yuv420p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB242_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB242_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB242_18:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_yuv420p_uv
.visible .entry Subsample_Lanczos_nv12_yuv420p_uv(
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_0,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_1,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_2,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_3,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_4,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_5,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_6,
	.param .u64 Subsample_Lanczos_nv12_yuv420p_uv_param_7,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_uv_param_8,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_uv_param_9,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_uv_param_10,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_uv_param_11,
	.param .u32 Subsample_Lanczos_nv12_yuv420p_uv_param_12,
	.param .f32 Subsample_Lanczos_nv12_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB243_18;
	bra.uni 	$L__BB243_1;
$L__BB243_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_yuv420p_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB243_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB243_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB243_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB243_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB243_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB243_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB243_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB243_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB243_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB243_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB243_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB243_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_yuv420p_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_nv12_yuv420p_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB243_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB243_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_nv12_yuv420p_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB243_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB243_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
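	// The fetches below read the interleaved chroma texture from param_1:
	// both components of each texel (%r17/%r18, %r21/%r22, ...) are carried
	// through the weighted sums, and the two results are scaled by 255.0,
	// truncated, and written to the separate planes from param_5 and param_6,
	// de-interleaving the nv12 chroma into planar U and V.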
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f437F0000;
	mul.f32 	%f222, %f220, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs1;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs2;
$L__BB243_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_yuv420p
.visible .entry Subsample_Lanczos_yuv444p_yuv420p(
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_yuv420p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB244_18;
	bra.uni 	$L__BB244_1;
$L__BB244_1:
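	// Luma path for the yuv444p input: the body appears identical to the
	// yuv420p and nv12 luma kernels above; only the chroma entry points
	// differ between these formats.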
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_yuv420p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB244_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB244_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB244_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB244_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB244_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB244_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB244_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB244_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB244_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB244_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB244_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB244_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p_yuv420p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB244_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB244_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_yuv420p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB244_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB244_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
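	// The texture returns normalized floats, so scale by 255.0 (0f437F0000),
	// truncate, and store a single byte at dst + y * pitch + x (%r5 appears to
	// be the destination pitch, loaded from param_10).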
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB244_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_yuv420p_uv
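	// Chroma variant: appears to run the same Lanczos-2 resampling twice per
	// thread, once for each chroma plane of the yuv444p source (texture param_1
	// -> dst plane param_5, then texture param_2 -> dst plane param_6), writing
	// 8-bit samples. The naming matches FFmpeg's vf_scale_cuda subsampling
	// kernels; param_13 (.f32) is declared but never loaded below.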
.visible .entry Subsample_Lanczos_yuv444p_yuv420p_uv(
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_yuv420p_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_yuv420p_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB245_34;
	bra.uni 	$L__BB245_1;
$L__BB245_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB245_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB245_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB245_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB245_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB245_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB245_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB245_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB245_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB245_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB245_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB245_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB245_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB245_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB245_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB245_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB245_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f208;
	mul.wide.s32 	%rd24, %r2, %r5;
	cvt.s64.s32 	%rd25, %r1;
	add.s64 	%rd3, %rd24, %rd25;
	add.s64 	%rd26, %rd2, %rd3;
	st.global.u8 	[%rd26], %rs1;
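	// First chroma plane done; the code below recomputes the same window values
	// (same angles and predicates %p4-%p11) and repeats the 4x4 blend on the
	// second source texture (param_2) for the other chroma plane (dst param_6).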
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB245_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB245_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB245_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB245_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB245_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB245_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB245_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB245_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB245_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB245_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB245_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB245_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB245_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB245_31:
	ld.param.u64 	%rd27, [Subsample_Lanczos_yuv444p_yuv420p_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB245_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB245_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd27, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd27, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd27, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd27, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd27, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd27, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd27, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd27, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd27, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd27, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd27, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd27, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd27, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd27, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd27, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd27, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f322;
	add.s64 	%rd43, %rd1, %rd3;
	st.global.u8 	[%rd43], %rs2;
$L__BB245_34:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_yuv420p
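	// Luma variant for a p010le source: same Lanczos-2 filter as above, but the
	// source samples are 16-bit, so the blended value appears to be scaled by
	// 65535.0 (0f477FFF00) and shifted right by 8 to yield the 8-bit yuv420p
	// luma sample.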
.visible .entry Subsample_Lanczos_p010le_yuv420p(
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_0,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_1,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_2,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_3,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_4,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_5,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_6,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_param_7,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_param_8,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_param_9,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_param_10,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_param_11,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_param_12,
	.param .f32 Subsample_Lanczos_p010le_yuv420p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB246_18;
	bra.uni 	$L__BB246_1;
$L__BB246_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_yuv420p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB246_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB246_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB246_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB246_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB246_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB246_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB246_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB246_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB246_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB246_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB246_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB246_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_yuv420p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB246_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB246_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_yuv420p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB246_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB246_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
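	// 16-bit -> 8-bit reduction: scale the normalized result by 65535.0,
	// truncate to u16, then keep the high byte (shr.u16 by 8) for the byte store.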
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB246_18:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_yuv420p_uv
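	// Chroma variant for p010le: the source texture (param_1) appears to hold
	// interleaved UV, so each tap reads both the .x and .y channels; the two
	// blended results are reduced to 8 bits (>>8) and stored to separate
	// destination planes (x channel -> param_5, y channel -> param_6).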
.visible .entry Subsample_Lanczos_p010le_yuv420p_uv(
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_0,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_1,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_2,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_3,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_4,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_5,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_6,
	.param .u64 Subsample_Lanczos_p010le_yuv420p_uv_param_7,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_uv_param_8,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_uv_param_9,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_uv_param_10,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_uv_param_11,
	.param .u32 Subsample_Lanczos_p010le_yuv420p_uv_param_12,
	.param .f32 Subsample_Lanczos_p010le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB247_18;
	bra.uni 	$L__BB247_1;
$L__BB247_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_yuv420p_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB247_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB247_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB247_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB247_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB247_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB247_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB247_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB247_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB247_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB247_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB247_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB247_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_yuv420p_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_p010le_yuv420p_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB247_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB247_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_p010le_yuv420p_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB247_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB247_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB247_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_yuv420p
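	// Luma variant for a p016le (full 16-bit) source; apart from the parameter
	// names the body appears identical to the p010le luma kernel above, using
	// the same 65535.0 scale and >>8 reduction.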
.visible .entry Subsample_Lanczos_p016le_yuv420p(
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_0,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_1,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_2,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_3,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_4,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_5,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_6,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_param_7,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_param_8,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_param_9,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_param_10,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_param_11,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_param_12,
	.param .f32 Subsample_Lanczos_p016le_yuv420p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB248_18;
	bra.uni 	$L__BB248_1;
$L__BB248_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_yuv420p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB248_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB248_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB248_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB248_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB248_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB248_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB248_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB248_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB248_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB248_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB248_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB248_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_yuv420p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB248_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB248_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_yuv420p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB248_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB248_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB248_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_yuv420p_uv
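	// Chroma variant for p016le: interleaved UV source texture, same structure
	// as the p010le_uv kernel above (x channel -> param_5, y channel -> param_6,
	// each reduced to 8 bits with >>8).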
.visible .entry Subsample_Lanczos_p016le_yuv420p_uv(
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_0,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_1,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_2,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_3,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_4,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_5,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_6,
	.param .u64 Subsample_Lanczos_p016le_yuv420p_uv_param_7,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_uv_param_8,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_uv_param_9,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_uv_param_10,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_uv_param_11,
	.param .u32 Subsample_Lanczos_p016le_yuv420p_uv_param_12,
	.param .f32 Subsample_Lanczos_p016le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB249_18;
	bra.uni 	$L__BB249_1;
$L__BB249_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_yuv420p_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB249_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB249_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB249_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB249_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB249_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB249_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB249_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB249_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB249_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB249_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB249_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB249_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_yuv420p_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_p016le_yuv420p_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB249_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB249_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_yuv420p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_p016le_yuv420p_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB249_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB249_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
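	// Scale both accumulated samples to [0, 65535] and store the high byte of each
	// to its own destination buffer.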
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB249_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_yuv420p
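	// 4x4 Lanczos-2 resample of a single 16-bit plane: normalized tap weights are
	// applied to 16 texture samples, the result is scaled by 65535, and its high
	// byte is stored to the 8-bit destination (param_4) at y * pitch (param_10) + x.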
.visible .entry Subsample_Lanczos_yuv444p16le_yuv420p(
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_yuv420p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_yuv420p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_yuv420p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB250_18;
	bra.uni 	$L__BB250_1;
$L__BB250_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_yuv420p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_yuv420p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB250_3;
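	// Lanczos-2 tap weight: sin(X) * sin(X/2) / (X * X / 2) with X = pi * offset;
	// the branch above keeps the weight at 1.0 when X == 0.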
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB250_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB250_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB250_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB250_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB250_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB250_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB250_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB250_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB250_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB250_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB250_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p16le_yuv420p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB250_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB250_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_yuv420p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_yuv420p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB250_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB250_17:
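	// Normalize the four horizontal (%f186-%f189) and four vertical (%f190-%f193)
	// tap weights by their respective sums.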
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
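	// Scale the filtered value to [0, 65535], truncate, and keep the high byte for
	// the 8-bit destination plane.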
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB250_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_yuv420p_uv
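	// Chroma variant: the same Lanczos-2 filter is run over the two chroma textures
	// (param_1 and param_2); each filtered sample is stored as one byte into its own
	// destination plane (param_5 and param_6).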
.visible .entry Subsample_Lanczos_yuv444p16le_yuv420p_uv(
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB251_34;
	bra.uni 	$L__BB251_1;
$L__BB251_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB251_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB251_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB251_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB251_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB251_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB251_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB251_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB251_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB251_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB251_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB251_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB251_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB251_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB251_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB251_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB251_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f208;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd24, %r2, %r5;
	cvt.s64.s32 	%rd25, %r1;
	add.s64 	%rd3, %rd24, %rd25;
	add.s64 	%rd26, %rd2, %rd3;
	st.global.u8 	[%rd26], %rs2;
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB251_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB251_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB251_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB251_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB251_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB251_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB251_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB251_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB251_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB251_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB251_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB251_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB251_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB251_31:
	ld.param.u64 	%rd27, [Subsample_Lanczos_yuv444p16le_yuv420p_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB251_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB251_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd27, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd27, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd27, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd27, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd27, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd27, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd27, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd27, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd27, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd27, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd27, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd27, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd27, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd27, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd27, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd27, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f322;
	shr.u16 	%rs4, %rs3, 8;
	add.s64 	%rd43, %rd1, %rd3;
	st.global.u8 	[%rd43], %rs4;
$L__BB251_34:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_nv12
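	// Same Lanczos-2 filter for an 8-bit plane: the weighted sum is scaled by 255
	// and stored directly as a single byte (no high-byte extraction).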
.visible .entry Subsample_Lanczos_yuv420p_nv12(
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_nv12_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB252_18;
	bra.uni 	$L__BB252_1;
$L__BB252_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_nv12_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB252_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB252_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB252_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB252_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB252_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB252_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB252_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB252_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB252_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB252_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB252_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB252_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv420p_nv12_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB252_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB252_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_nv12_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB252_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB252_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB252_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_nv12_uv
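	// Chroma variant for an NV12 destination: both chroma textures (param_1, param_2)
	// are filtered and the two bytes are written interleaved (st.global.v2.u8) into
	// the single chroma plane (param_5), addressed at ((pitch / 2) * y + x) * 2.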
.visible .entry Subsample_Lanczos_yuv420p_nv12_uv(
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_nv12_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_nv12_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_nv12_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB253_34;
	bra.uni 	$L__BB253_1;
$L__BB253_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_nv12_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB253_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB253_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB253_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB253_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB253_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB253_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB253_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB253_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB253_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB253_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB253_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB253_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB253_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB253_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv420p_nv12_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB253_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB253_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB253_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB253_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB253_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB253_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB253_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB253_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB253_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB253_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB253_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB253_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB253_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB253_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_nv12_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB253_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB253_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_nv12_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv420p_nv12_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB253_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB253_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f319;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f370;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 1;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 1;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u8 	[%rd44], {%rs1, %rs2};
$L__BB253_34:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_nv12
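	// Single-plane (luma) path for NV12 -> NV12: identical Lanczos-2 weighting;
	// the result is scaled by 255 and stored as one byte per pixel.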
.visible .entry Subsample_Lanczos_nv12_nv12(
	.param .u64 Subsample_Lanczos_nv12_nv12_param_0,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_1,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_2,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_3,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_4,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_5,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_6,
	.param .u64 Subsample_Lanczos_nv12_nv12_param_7,
	.param .u32 Subsample_Lanczos_nv12_nv12_param_8,
	.param .u32 Subsample_Lanczos_nv12_nv12_param_9,
	.param .u32 Subsample_Lanczos_nv12_nv12_param_10,
	.param .u32 Subsample_Lanczos_nv12_nv12_param_11,
	.param .u32 Subsample_Lanczos_nv12_nv12_param_12,
	.param .f32 Subsample_Lanczos_nv12_nv12_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB254_18;
	bra.uni 	$L__BB254_1;
$L__BB254_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_nv12_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB254_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB254_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB254_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB254_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB254_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB254_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB254_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB254_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB254_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB254_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB254_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB254_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_nv12_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB254_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB254_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_nv12_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB254_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB254_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
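	// NOTE: the 4x4 weighted sum is complete; the result is scaled to [0,255], truncated
	// to u8, and stored at dst + y*param_10 + x (param_10 presumably the destination pitch).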
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB254_18:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_nv12_uv
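	// NOTE: chroma (UV) variant of the Lanczos scaler for NV12 -> NV12. It samples the
	// interleaved UV texture (param_1), filters U and V with the same 4x4 Lanczos taps as
	// the luma kernel, and stores one interleaved (U,V) byte pair per output pixel (param_5).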
.visible .entry Subsample_Lanczos_nv12_nv12_uv(
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_0,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_1,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_2,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_3,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_4,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_5,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_6,
	.param .u64 Subsample_Lanczos_nv12_nv12_uv_param_7,
	.param .u32 Subsample_Lanczos_nv12_nv12_uv_param_8,
	.param .u32 Subsample_Lanczos_nv12_nv12_uv_param_9,
	.param .u32 Subsample_Lanczos_nv12_nv12_uv_param_10,
	.param .u32 Subsample_Lanczos_nv12_nv12_uv_param_11,
	.param .u32 Subsample_Lanczos_nv12_nv12_uv_param_12,
	.param .f32 Subsample_Lanczos_nv12_nv12_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB255_18;
	bra.uni 	$L__BB255_1;
$L__BB255_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_nv12_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
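	// NOTE: Lanczos (a = 2) weight pattern, repeated for each of the 8 taps below: for phase
	// distance x the weight is sin(pi*x) * sin(pi*x/2) / ((pi*x)^2 / 2), and the guarded
	// branches keep the 1.0 default when pi*x == 0. 0f40490FDB is pi, 0f3F000000 is 0.5.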
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB255_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB255_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB255_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB255_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB255_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB255_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB255_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB255_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB255_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB255_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB255_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB255_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_nv12_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB255_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB255_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_nv12_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB255_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB255_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
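	// NOTE: gather a 4x4 tap neighborhood around the source position; each tex.2d fetch
	// returns the two interleaved chroma components in its .x and .y lanes (%r17/%r18, etc.).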
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
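	// NOTE: both filtered chroma values are scaled to [0,255], truncated to u8, and stored
	// with st.global.v2.u8; the byte offset is 2*((param_10>>1)*y + x), i.e. y*pitch + 2*x
	// for the interleaved UV plane when param_10 (presumably the pitch) is even.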
	mul.f32 	%f221, %f219, 0f437F0000;
	mul.f32 	%f222, %f220, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u8 	[%rd27], {%rs1, %rs2};
$L__BB255_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_nv12
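	// NOTE: luma kernel for YUV444P -> NV12 scaling. The structure matches the other Lanczos
	// luma kernels: 8 tap weights, per-axis normalization, a 4x4 tex.2d gather from the
	// source texture (param_0), scale by 255.0, and a single u8 store into param_4.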
.visible .entry Subsample_Lanczos_yuv444p_nv12(
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_nv12_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB256_18;
	bra.uni 	$L__BB256_1;
$L__BB256_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_nv12_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB256_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB256_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB256_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB256_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB256_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB256_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB256_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB256_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB256_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB256_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB256_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB256_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p_nv12_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB256_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB256_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_nv12_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB256_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB256_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB256_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_nv12_uv
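	// NOTE: chroma kernel for YUV444P -> NV12. Unlike the NV12-source UV kernels, the two
	// chroma components live in separate planes here, so the kernel samples two textures
	// (param_1 and param_2), filters each with its own recomputed set of Lanczos weights,
	// and writes the results as an interleaved (U,V) byte pair into param_5.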
.visible .entry Subsample_Lanczos_yuv444p_nv12_uv(
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_nv12_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_nv12_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_nv12_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB257_34;
	bra.uni 	$L__BB257_1;
$L__BB257_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_nv12_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB257_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB257_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB257_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB257_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB257_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB257_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB257_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB257_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB257_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB257_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB257_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB257_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB257_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB257_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv444p_nv12_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB257_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB257_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB257_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB257_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB257_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB257_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB257_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB257_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB257_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB257_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB257_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB257_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB257_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB257_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_nv12_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB257_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB257_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_nv12_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv444p_nv12_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB257_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB257_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f319;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f370;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 1;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 1;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u8 	[%rd44], {%rs1, %rs2};
$L__BB257_34:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_nv12
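	// NOTE: luma kernel for P010LE -> NV12. Same Lanczos structure as above; the filtered
	// result is scaled by 65535.0 (0f477FFF00), truncated to u16, and only the high byte
	// (shr.u16 by 8) is stored, reducing the 16-bit-container luma to 8 bits.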
.visible .entry Subsample_Lanczos_p010le_nv12(
	.param .u64 Subsample_Lanczos_p010le_nv12_param_0,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_1,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_2,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_3,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_4,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_5,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_6,
	.param .u64 Subsample_Lanczos_p010le_nv12_param_7,
	.param .u32 Subsample_Lanczos_p010le_nv12_param_8,
	.param .u32 Subsample_Lanczos_p010le_nv12_param_9,
	.param .u32 Subsample_Lanczos_p010le_nv12_param_10,
	.param .u32 Subsample_Lanczos_p010le_nv12_param_11,
	.param .u32 Subsample_Lanczos_p010le_nv12_param_12,
	.param .f32 Subsample_Lanczos_p010le_nv12_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB258_18;
	bra.uni 	$L__BB258_1;
$L__BB258_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_nv12_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB258_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB258_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB258_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB258_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB258_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB258_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB258_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB258_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB258_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB258_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB258_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB258_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_nv12_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB258_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB258_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_nv12_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB258_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB258_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB258_18:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_nv12_uv
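	// NOTE: chroma kernel for P010LE -> NV12: same shape as the NV12 UV kernel above, but
	// both filtered chroma values are scaled by 65535.0, truncated to u16, shifted right by
	// 8 to keep the high byte, and stored as an interleaved (U,V) pair.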
.visible .entry Subsample_Lanczos_p010le_nv12_uv(
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_0,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_1,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_2,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_3,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_4,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_5,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_6,
	.param .u64 Subsample_Lanczos_p010le_nv12_uv_param_7,
	.param .u32 Subsample_Lanczos_p010le_nv12_uv_param_8,
	.param .u32 Subsample_Lanczos_p010le_nv12_uv_param_9,
	.param .u32 Subsample_Lanczos_p010le_nv12_uv_param_10,
	.param .u32 Subsample_Lanczos_p010le_nv12_uv_param_11,
	.param .u32 Subsample_Lanczos_p010le_nv12_uv_param_12,
	.param .f32 Subsample_Lanczos_p010le_nv12_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB259_18;
	bra.uni 	$L__BB259_1;
$L__BB259_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_nv12_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB259_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB259_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB259_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB259_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB259_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB259_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB259_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB259_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB259_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB259_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB259_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB259_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_nv12_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB259_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB259_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_nv12_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB259_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB259_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 8;
	shr.u16 	%rs4, %rs2, 8;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u8 	[%rd27], {%rs3, %rs4};
$L__BB259_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_nv12
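	// NOTE: luma kernel for P016LE -> NV12, following the same Lanczos weight computation
	// and 4x4 gather template as the kernels above.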
.visible .entry Subsample_Lanczos_p016le_nv12(
	.param .u64 Subsample_Lanczos_p016le_nv12_param_0,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_1,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_2,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_3,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_4,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_5,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_6,
	.param .u64 Subsample_Lanczos_p016le_nv12_param_7,
	.param .u32 Subsample_Lanczos_p016le_nv12_param_8,
	.param .u32 Subsample_Lanczos_p016le_nv12_param_9,
	.param .u32 Subsample_Lanczos_p016le_nv12_param_10,
	.param .u32 Subsample_Lanczos_p016le_nv12_param_11,
	.param .u32 Subsample_Lanczos_p016le_nv12_param_12,
	.param .f32 Subsample_Lanczos_p016le_nv12_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB260_18;
	bra.uni 	$L__BB260_1;
$L__BB260_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_nv12_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB260_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB260_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB260_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB260_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB260_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB260_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB260_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB260_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB260_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB260_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB260_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB260_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_nv12_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB260_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB260_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_nv12_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB260_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB260_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB260_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_nv12_uv
.visible .entry Subsample_Lanczos_p016le_nv12_uv(
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_0,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_1,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_2,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_3,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_4,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_5,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_6,
	.param .u64 Subsample_Lanczos_p016le_nv12_uv_param_7,
	.param .u32 Subsample_Lanczos_p016le_nv12_uv_param_8,
	.param .u32 Subsample_Lanczos_p016le_nv12_uv_param_9,
	.param .u32 Subsample_Lanczos_p016le_nv12_uv_param_10,
	.param .u32 Subsample_Lanczos_p016le_nv12_uv_param_11,
	.param .u32 Subsample_Lanczos_p016le_nv12_uv_param_12,
	.param .f32 Subsample_Lanczos_p016le_nv12_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB261_18;
	bra.uni 	$L__BB261_1;
$L__BB261_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_nv12_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB261_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB261_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB261_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB261_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB261_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB261_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB261_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB261_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB261_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB261_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB261_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB261_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_nv12_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB261_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB261_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_nv12_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_nv12_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB261_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB261_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 8;
	shr.u16 	%rs4, %rs2, 8;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u8 	[%rd27], {%rs3, %rs4};
$L__BB261_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_nv12
.visible .entry Subsample_Lanczos_yuv444p16le_nv12(
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_nv12_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_nv12_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_nv12_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB262_18;
	bra.uni 	$L__BB262_1;
$L__BB262_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_nv12_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_nv12_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB262_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB262_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB262_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB262_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB262_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB262_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB262_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB262_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB262_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB262_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB262_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB262_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p16le_nv12_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB262_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB262_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_nv12_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_nv12_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB262_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB262_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB262_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_nv12_uv
.visible .entry Subsample_Lanczos_yuv444p16le_nv12_uv(
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_nv12_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_nv12_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_nv12_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB263_34;
	bra.uni 	$L__BB263_1;
$L__BB263_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB263_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB263_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB263_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB263_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB263_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB263_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB263_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB263_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB263_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB263_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB263_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB263_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB263_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB263_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB263_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB263_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB263_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB263_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB263_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB263_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB263_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB263_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB263_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB263_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB263_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB263_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB263_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB263_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB263_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB263_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv444p16le_nv12_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB263_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB263_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f319;
	shr.u16 	%rs2, %rs1, 8;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f370;
	shr.u16 	%rs4, %rs3, 8;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 1;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 1;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u8 	[%rd44], {%rs2, %rs4};
$L__BB263_34:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_yuv444p
.visible .entry Subsample_Lanczos_yuv420p_yuv444p(
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_yuv444p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB264_18;
	bra.uni 	$L__BB264_1;
$L__BB264_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_yuv444p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB264_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB264_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB264_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB264_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB264_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB264_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB264_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB264_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB264_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB264_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB264_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB264_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv420p_yuv444p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB264_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB264_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_yuv444p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB264_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB264_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB264_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_yuv444p_uv
.visible .entry Subsample_Lanczos_yuv420p_yuv444p_uv(
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB265_34;
	bra.uni 	$L__BB265_1;
$L__BB265_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB265_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB265_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB265_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB265_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB265_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB265_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB265_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB265_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB265_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB265_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB265_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB265_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB265_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB265_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB265_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB265_17:
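	// Annotation: the four weights per axis are normalised by their sum, then
	// 16 texels around the source position are fetched from the texture object
	// in param_1 and combined as a separable weighted sum (along x, then y).
	// The result is scaled by 255.0 (0f437F0000), truncated and stored as u8
	// at base + y*pitch + x; param_10 appears to be the destination pitch and
	// param_5 the first chroma output plane.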
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f208;
	mul.wide.s32 	%rd24, %r2, %r5;
	cvt.s64.s32 	%rd25, %r1;
	add.s64 	%rd3, %rd24, %rd25;
	add.s64 	%rd26, %rd2, %rd3;
	st.global.u8 	[%rd26], %rs1;
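	// Annotation: second pass of this _uv kernel. The sinc weights are
	// recomputed (rematerialised rather than kept live) and the same 4x4
	// gather is repeated on the texture from param_2, with the result written
	// to the other chroma plane from param_6 at the same y*pitch + x offset.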
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB265_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB265_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB265_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB265_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB265_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB265_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB265_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB265_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB265_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB265_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB265_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB265_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB265_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB265_31:
	ld.param.u64 	%rd27, [Subsample_Lanczos_yuv420p_yuv444p_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB265_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB265_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd27, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd27, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd27, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd27, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd27, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd27, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd27, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd27, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd27, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd27, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd27, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd27, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd27, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd27, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd27, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd27, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f322;
	add.s64 	%rd43, %rd1, %rd3;
	st.global.u8 	[%rd43], %rs2;
$L__BB265_34:
	ret;

}
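	// Annotation: Subsample_Lanczos_nv12_yuv444p is the same Lanczos-2
	// resampler applied to a single (luma) plane of an NV12 source, written as
	// 8-bit samples to a yuv444p destination plane; the parameter layout
	// matches the kernels above.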
	// .globl	Subsample_Lanczos_nv12_yuv444p
.visible .entry Subsample_Lanczos_nv12_yuv444p(
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_0,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_1,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_2,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_3,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_4,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_5,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_6,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_param_7,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_param_8,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_param_9,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_param_10,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_param_11,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_param_12,
	.param .f32 Subsample_Lanczos_nv12_yuv444p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB266_18;
	bra.uni 	$L__BB266_1;
$L__BB266_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_yuv444p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB266_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB266_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB266_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB266_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB266_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB266_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB266_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB266_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB266_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB266_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB266_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB266_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_yuv444p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB266_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB266_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_yuv444p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB266_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB266_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB266_18:
	ret;

}
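	// Annotation: Subsample_Lanczos_nv12_yuv444p_uv is the chroma variant for
	// NV12 input. Each tex.2d fetch yields interleaved U and V in its first
	// two components, so the weighted sums are carried in pairs and two bytes
	// are stored per pixel, one to each of the output planes from param_5 and
	// param_6.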
	// .globl	Subsample_Lanczos_nv12_yuv444p_uv
.visible .entry Subsample_Lanczos_nv12_yuv444p_uv(
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_0,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_1,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_2,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_3,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_4,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_5,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_6,
	.param .u64 Subsample_Lanczos_nv12_yuv444p_uv_param_7,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_uv_param_8,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_uv_param_9,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_uv_param_10,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_uv_param_11,
	.param .u32 Subsample_Lanczos_nv12_yuv444p_uv_param_12,
	.param .f32 Subsample_Lanczos_nv12_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB267_18;
	bra.uni 	$L__BB267_1;
$L__BB267_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_yuv444p_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB267_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB267_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB267_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB267_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB267_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB267_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB267_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB267_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB267_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB267_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB267_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB267_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_yuv444p_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_nv12_yuv444p_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB267_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB267_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_nv12_yuv444p_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB267_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB267_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f437F0000;
	mul.f32 	%f222, %f220, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs1;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs2;
$L__BB267_18:
	ret;

}
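	// Annotation: Subsample_Lanczos_yuv444p_yuv444p resamples the luma plane
	// of a planar 4:4:4 source; structurally identical to the nv12 luma kernel
	// above.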
	// .globl	Subsample_Lanczos_yuv444p_yuv444p
.visible .entry Subsample_Lanczos_yuv444p_yuv444p(
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_yuv444p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB268_18;
	bra.uni 	$L__BB268_1;
$L__BB268_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_yuv444p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB268_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB268_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB268_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB268_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB268_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB268_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB268_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB268_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB268_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB268_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB268_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB268_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p_yuv444p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB268_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB268_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_yuv444p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB268_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB268_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs1;
$L__BB268_18:
	ret;

}
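	// Annotation: Subsample_Lanczos_yuv444p_yuv444p_uv is the chroma variant
	// for planar 4:4:4 input. U and V live in separate textures (param_1 and
	// param_2), so the whole weight/gather/store sequence runs twice, once per
	// plane, like the yuv420p _uv kernel above.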
	// .globl	Subsample_Lanczos_yuv444p_yuv444p_uv
.visible .entry Subsample_Lanczos_yuv444p_yuv444p_uv(
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB269_34;
	bra.uni 	$L__BB269_1;
$L__BB269_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB269_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB269_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB269_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB269_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB269_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB269_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB269_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB269_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB269_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB269_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB269_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB269_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB269_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB269_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB269_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB269_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f208;
	mul.wide.s32 	%rd24, %r2, %r5;
	cvt.s64.s32 	%rd25, %r1;
	add.s64 	%rd3, %rd24, %rd25;
	add.s64 	%rd26, %rd2, %rd3;
	st.global.u8 	[%rd26], %rs1;
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB269_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB269_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB269_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB269_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB269_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB269_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB269_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB269_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB269_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB269_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB269_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB269_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB269_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB269_31:
	ld.param.u64 	%rd27, [Subsample_Lanczos_yuv444p_yuv444p_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB269_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB269_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd27, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd27, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd27, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd27, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd27, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd27, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd27, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd27, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd27, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd27, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd27, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd27, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd27, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd27, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd27, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd27, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f437F0000;
	cvt.rzi.u16.f32 	%rs2, %f322;
	add.s64 	%rd43, %rd1, %rd3;
	st.global.u8 	[%rd43], %rs2;
$L__BB269_34:
	ret;

}
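	// Annotation: Subsample_Lanczos_p010le_yuv444p handles the luma plane of a
	// 10-bit P010LE source; its control flow and Lanczos weight computation
	// follow the same pattern as the kernels above.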
	// .globl	Subsample_Lanczos_p010le_yuv444p
.visible .entry Subsample_Lanczos_p010le_yuv444p(
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_0,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_1,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_2,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_3,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_4,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_5,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_6,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_param_7,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_param_8,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_param_9,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_param_10,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_param_11,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_param_12,
	.param .f32 Subsample_Lanczos_p010le_yuv444p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB270_18;
	bra.uni 	$L__BB270_1;
$L__BB270_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_yuv444p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB270_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB270_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB270_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB270_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB270_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB270_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB270_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB270_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB270_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB270_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB270_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB270_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_yuv444p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB270_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB270_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_yuv444p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB270_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB270_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB270_18:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_yuv444p_uv
.visible .entry Subsample_Lanczos_p010le_yuv444p_uv(
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_0,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_1,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_2,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_3,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_4,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_5,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_6,
	.param .u64 Subsample_Lanczos_p010le_yuv444p_uv_param_7,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_uv_param_8,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_uv_param_9,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_uv_param_10,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_uv_param_11,
	.param .u32 Subsample_Lanczos_p010le_yuv444p_uv_param_12,
	.param .f32 Subsample_Lanczos_p010le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB271_18;
	bra.uni 	$L__BB271_1;
$L__BB271_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_yuv444p_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB271_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB271_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB271_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB271_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB271_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB271_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB271_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB271_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB271_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB271_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB271_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB271_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_yuv444p_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_p010le_yuv444p_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB271_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB271_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_p010le_yuv444p_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB271_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB271_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB271_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_yuv444p
.visible .entry Subsample_Lanczos_p016le_yuv444p(
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_0,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_1,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_2,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_3,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_4,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_5,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_6,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_param_7,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_param_8,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_param_9,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_param_10,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_param_11,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_param_12,
	.param .f32 Subsample_Lanczos_p016le_yuv444p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB272_18;
	bra.uni 	$L__BB272_1;
$L__BB272_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_yuv444p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB272_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB272_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB272_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB272_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB272_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB272_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB272_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB272_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB272_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB272_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB272_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB272_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_yuv444p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB272_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB272_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_yuv444p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB272_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB272_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB272_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_yuv444p_uv
.visible .entry Subsample_Lanczos_p016le_yuv444p_uv(
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_0,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_1,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_2,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_3,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_4,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_5,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_6,
	.param .u64 Subsample_Lanczos_p016le_yuv444p_uv_param_7,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_uv_param_8,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_uv_param_9,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_uv_param_10,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_uv_param_11,
	.param .u32 Subsample_Lanczos_p016le_yuv444p_uv_param_12,
	.param .f32 Subsample_Lanczos_p016le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<27>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB273_18;
	bra.uni 	$L__BB273_1;
$L__BB273_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_yuv444p_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB273_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB273_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB273_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB273_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB273_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB273_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB273_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB273_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB273_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB273_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB273_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB273_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_yuv444p_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_p016le_yuv444p_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB273_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB273_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_yuv444p_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_p016le_yuv444p_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB273_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB273_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 8;
	mul.wide.s32 	%rd22, %r2, %r5;
	cvt.s64.s32 	%rd23, %r1;
	add.s64 	%rd24, %rd22, %rd23;
	add.s64 	%rd25, %rd2, %rd24;
	st.global.u8 	[%rd25], %rs3;
	shr.u16 	%rs4, %rs2, 8;
	add.s64 	%rd26, %rd1, %rd24;
	st.global.u8 	[%rd26], %rs4;
$L__BB273_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_yuv444p
.visible .entry Subsample_Lanczos_yuv444p16le_yuv444p(
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_yuv444p_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<24>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_yuv444p_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_yuv444p_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB274_18;
	bra.uni 	$L__BB274_1;
$L__BB274_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_yuv444p_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_yuv444p_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB274_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB274_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB274_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB274_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB274_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB274_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB274_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB274_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB274_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB274_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB274_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB274_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p16le_yuv444p_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB274_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB274_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_yuv444p_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_yuv444p_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB274_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB274_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd20, %r2, %r5;
	cvt.s64.s32 	%rd21, %r1;
	add.s64 	%rd22, %rd20, %rd21;
	add.s64 	%rd23, %rd1, %rd22;
	st.global.u8 	[%rd23], %rs2;
$L__BB274_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_yuv444p_uv
.visible .entry Subsample_Lanczos_yuv444p16le_yuv444p_uv(
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<44>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB275_34;
	bra.uni 	$L__BB275_1;
$L__BB275_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB275_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB275_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB275_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB275_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB275_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB275_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB275_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB275_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB275_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB275_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB275_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB275_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB275_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB275_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB275_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB275_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f208;
	shr.u16 	%rs2, %rs1, 8;
	mul.wide.s32 	%rd24, %r2, %r5;
	cvt.s64.s32 	%rd25, %r1;
	add.s64 	%rd3, %rd24, %rd25;
	add.s64 	%rd26, %rd2, %rd3;
	st.global.u8 	[%rd26], %rs2;
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB275_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB275_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB275_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB275_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB275_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB275_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB275_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB275_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB275_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB275_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB275_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB275_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB275_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB275_31:
	ld.param.u64 	%rd27, [Subsample_Lanczos_yuv444p16le_yuv444p_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB275_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB275_33:
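	// Normalize the re-evaluated tap weights, then repeat the 4x4 gather from the second
	// chroma texture (%rd27, param_2) and store that plane's 8-bit sample through %rd1
	// (param_6) at the same row/column offset.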
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd27, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd27, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd27, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd27, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd27, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd27, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd27, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd27, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd27, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd27, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd27, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd27, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd27, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd27, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd27, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd27, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f322;
	shr.u16 	%rs4, %rs3, 8;
	add.s64 	%rd43, %rd1, %rd3;
	st.global.u8 	[%rd43], %rs4;
$L__BB275_34:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_p010le
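	// Lanczos (a=2) rescale of an 8-bit luma plane: reads the texture passed in param_0,
	// maps each destination pixel back into the source using the param_11/param_8 and
	// param_12/param_9 ratios, and writes one 10-bit p010le sample per thread into the
	// plane passed in param_4.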
.visible .entry Subsample_Lanczos_yuv420p_p010le(
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_p010le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB276_18;
	bra.uni 	$L__BB276_1;
$L__BB276_1:
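	// Compute the horizontal/vertical scale factors and the source-space position
	// sx = scale_x * (x + 0.5) - 0.5 (and likewise sy); floor(sx)/floor(sy) pick the
	// centre taps and the fractional remainders drive the filter weights below.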
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_p010le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB276_3;
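	// Lanczos-2 weight for a tap at distance x from the sample point:
	// L(x) = sin(pi*x) * sin(pi*x/2) / ((pi*x)^2 / 2), evaluated with sin.approx.
	// When pi*x == 0 the preset weight of 1.0 (0f3F800000) is kept; this guarded pattern
	// repeats for the remaining three horizontal and the four vertical taps.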
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB276_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB276_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB276_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB276_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB276_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB276_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB276_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB276_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB276_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB276_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB276_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv420p_p010le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB276_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB276_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_p010le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB276_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB276_17:
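	// Normalize the four horizontal and the four vertical weights by their respective
	// sums so each group sums to 1.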
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
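	// Fetch the 4x4 neighbourhood around (floor(sx), floor(sy)) from the source texture
	// and reduce it separably: each row of four texels is combined with the horizontal
	// weights, then the four row results with the vertical weights.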
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
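	// Pack to p010le: scale to 8 bits (x 255.0), widen to 16 bits (x 257) and mask to the
	// top 10 bits (0xFFC0), then store the u16 at element (x, y) of the plane from param_4
	// using param_10 as the row pitch in bytes.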
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB276_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_p010le_uv
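	// Chroma companion of the kernel above: filters the two chroma planes (textures in
	// param_1 and param_2) with the same Lanczos-2 weights and stores each result pair as
	// interleaved 10-bit values (st.global.v2.u16) into the plane passed in param_5.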
.visible .entry Subsample_Lanczos_yuv420p_p010le_uv(
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_p010le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_p010le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_p010le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB277_34;
	bra.uni 	$L__BB277_1;
$L__BB277_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_p010le_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB277_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB277_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB277_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB277_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB277_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB277_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB277_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB277_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB277_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB277_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB277_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB277_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB277_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB277_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv420p_p010le_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB277_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB277_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
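	// The eight tap weights are evaluated a second time (under the same zero-x guards)
	// before the second chroma texture is filtered.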
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB277_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB277_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB277_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB277_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB277_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB277_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB277_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB277_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB277_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB277_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB277_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB277_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_p010le_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB277_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB277_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_p010le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv420p_p010le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB277_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB277_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f319;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f437F0000;
	cvt.rzi.u16.f32 	%rs4, %f370;
	mul.lo.s16 	%rs5, %rs4, 257;
	and.b16  	%rs6, %rs5, -64;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs3, %rs6};
$L__BB277_34:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_p010le
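	// Same Lanczos-2 luma path as Subsample_Lanczos_yuv420p_p010le (nv12 and yuv420p share
	// an identical 8-bit luma plane): texture from param_0 in, 10-bit samples out via param_4.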
.visible .entry Subsample_Lanczos_nv12_p010le(
	.param .u64 Subsample_Lanczos_nv12_p010le_param_0,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_1,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_2,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_3,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_4,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_5,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_6,
	.param .u64 Subsample_Lanczos_nv12_p010le_param_7,
	.param .u32 Subsample_Lanczos_nv12_p010le_param_8,
	.param .u32 Subsample_Lanczos_nv12_p010le_param_9,
	.param .u32 Subsample_Lanczos_nv12_p010le_param_10,
	.param .u32 Subsample_Lanczos_nv12_p010le_param_11,
	.param .u32 Subsample_Lanczos_nv12_p010le_param_12,
	.param .f32 Subsample_Lanczos_nv12_p010le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB278_18;
	bra.uni 	$L__BB278_1;
$L__BB278_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_p010le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB278_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB278_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB278_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB278_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB278_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB278_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB278_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB278_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB278_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB278_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB278_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB278_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_p010le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB278_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB278_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_p010le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB278_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB278_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB278_18:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_p010le_uv
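	// nv12 chroma is a single interleaved UV plane, so one texture (param_1) supplies both
	// chroma components per fetch; the pair is filtered with shared weights and the two
	// 10-bit results are stored together as v2.u16 into the plane from param_5.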
.visible .entry Subsample_Lanczos_nv12_p010le_uv(
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_0,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_1,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_2,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_3,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_4,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_5,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_6,
	.param .u64 Subsample_Lanczos_nv12_p010le_uv_param_7,
	.param .u32 Subsample_Lanczos_nv12_p010le_uv_param_8,
	.param .u32 Subsample_Lanczos_nv12_p010le_uv_param_9,
	.param .u32 Subsample_Lanczos_nv12_p010le_uv_param_10,
	.param .u32 Subsample_Lanczos_nv12_p010le_uv_param_11,
	.param .u32 Subsample_Lanczos_nv12_p010le_uv_param_12,
	.param .f32 Subsample_Lanczos_nv12_p010le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB279_18;
	bra.uni 	$L__BB279_1;
$L__BB279_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_p010le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB279_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB279_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB279_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB279_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB279_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB279_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB279_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB279_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB279_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB279_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB279_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB279_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_p010le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB279_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB279_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_p010le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB279_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB279_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
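	// Each tex fetch below returns both chroma components of the interleaved source; the
	// two channels are carried through the 4x4 weighted reduction side by side (paired
	// mul/fma sequences on the .x and .y results).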
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f437F0000;
	mul.f32 	%f222, %f220, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	mul.lo.s16 	%rs3, %rs1, 257;
	and.b16  	%rs4, %rs3, -64;
	mul.lo.s16 	%rs5, %rs2, 257;
	and.b16  	%rs6, %rs5, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs4, %rs6};
$L__BB279_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_p010le
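	// Luma path for yuv444p input; structurally the same Lanczos-2 filter as the other
	// p010le luma kernels above (texture param_0 in, 10-bit samples out via param_4).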
.visible .entry Subsample_Lanczos_yuv444p_p010le(
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_p010le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB280_18;
	bra.uni 	$L__BB280_1;
$L__BB280_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_p010le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB280_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB280_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB280_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB280_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB280_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB280_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB280_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB280_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB280_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB280_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB280_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB280_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p_p010le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB280_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB280_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_p010le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB280_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB280_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB280_18:
	ret;

}
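//
// The Subsample_Lanczos_* entries above and below all share one structure.
// Each thread maps its output pixel back into the source with
// src = scale * (dst + 0.5) - 0.5, where scale comes from the source and
// destination sizes passed in the parameters, splits src into floor(src)
// and the fractional part f, and evaluates a 4-tap Lanczos (a = 2) window
// at the distances {f+1, f, f-1, f-2} in each axis. The weight visible in
// the PTX, sin(pi*x) * sin(pi*x/2) / (0.5 * (pi*x)^2) with the x == 0 case
// forced to 1.0, is sinc(x) * sinc(x/2). The sixteen tex.2d.v4.f32.f32
// fetches are then combined with the normalized horizontal weights first
// and the normalized vertical weights second, rescaled to the output bit
// depth, truncated, and stored.
//
// A minimal CUDA-level sketch of that weight function only (hypothetical
// helper name, assuming the same a = 2 window that the PTX computes):
//
//   __device__ float lanczos2_weight(float x)   // x = signed tap distance
//   {
//       float px = x * 3.14159265f;             // pi * x
//       if (px == 0.0f)
//           return 1.0f;                        // sinc(0) * sinc(0) == 1
//       // sin(pi*x) * sin(pi*x/2) / (0.5*(pi*x)^2) == sinc(x) * sinc(x/2)
//       return sinf(px) * sinf(px * 0.5f) / (px * px * 0.5f);
//   }
//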
	// .globl	Subsample_Lanczos_yuv444p_p010le_uv
.visible .entry Subsample_Lanczos_yuv444p_p010le_uv(
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_p010le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_p010le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_p010le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB281_34;
	bra.uni 	$L__BB281_1;
$L__BB281_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_p010le_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB281_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB281_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB281_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB281_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB281_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB281_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB281_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB281_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB281_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB281_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB281_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB281_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB281_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB281_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv444p_p010le_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB281_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB281_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB281_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB281_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB281_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB281_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB281_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB281_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB281_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB281_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB281_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB281_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB281_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB281_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_p010le_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB281_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB281_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_p010le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv444p_p010le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB281_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB281_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f319;
	mul.lo.s16 	%rs2, %rs1, 257;
	and.b16  	%rs3, %rs2, -64;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f437F0000;
	cvt.rzi.u16.f32 	%rs4, %f370;
	mul.lo.s16 	%rs5, %rs4, 257;
	and.b16  	%rs6, %rs5, -64;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs3, %rs6};
$L__BB281_34:
	ret;

}
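//
// The *_uv entry above applies the same 4x4 Lanczos filtering to both
// chroma planes: it fetches one 4x4 neighborhood from each of two source
// textures (param_1 and param_2), re-evaluates the same weights for the
// second pass, and writes the two converted results as an interleaved
// pair with st.global.v2.u16. The destination pitch is divided by 4
// rather than 2 before row indexing because each stored element is four
// bytes (two 16-bit samples).
//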
	// .globl	Subsample_Lanczos_p010le_p010le
.visible .entry Subsample_Lanczos_p010le_p010le(
	.param .u64 Subsample_Lanczos_p010le_p010le_param_0,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_1,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_2,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_3,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_4,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_5,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_6,
	.param .u64 Subsample_Lanczos_p010le_p010le_param_7,
	.param .u32 Subsample_Lanczos_p010le_p010le_param_8,
	.param .u32 Subsample_Lanczos_p010le_p010le_param_9,
	.param .u32 Subsample_Lanczos_p010le_p010le_param_10,
	.param .u32 Subsample_Lanczos_p010le_p010le_param_11,
	.param .u32 Subsample_Lanczos_p010le_p010le_param_12,
	.param .f32 Subsample_Lanczos_p010le_p010le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB282_18;
	bra.uni 	$L__BB282_1;
$L__BB282_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_p010le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB282_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB282_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB282_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB282_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB282_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB282_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB282_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB282_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB282_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB282_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB282_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB282_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_p010le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB282_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB282_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_p010le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB282_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB282_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB282_18:
	ret;

}
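//
// The p010le -> p010le entry above reads an already 16-bit plane, so the
// filtered value is rescaled with a single multiply by 65535.0
// (0f477FFF00) and truncated, instead of the 255.0 (0f437F0000) scale
// followed by the *257 expansion and 0xFFC0 mask used on the 8-bit source
// paths; the result is stored without further masking.
//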
	// .globl	Subsample_Lanczos_p010le_p010le_uv
.visible .entry Subsample_Lanczos_p010le_p010le_uv(
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_0,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_1,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_2,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_3,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_4,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_5,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_6,
	.param .u64 Subsample_Lanczos_p010le_p010le_uv_param_7,
	.param .u32 Subsample_Lanczos_p010le_p010le_uv_param_8,
	.param .u32 Subsample_Lanczos_p010le_p010le_uv_param_9,
	.param .u32 Subsample_Lanczos_p010le_p010le_uv_param_10,
	.param .u32 Subsample_Lanczos_p010le_p010le_uv_param_11,
	.param .u32 Subsample_Lanczos_p010le_p010le_uv_param_12,
	.param .f32 Subsample_Lanczos_p010le_p010le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB283_18;
	bra.uni 	$L__BB283_1;
$L__BB283_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_p010le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB283_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB283_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB283_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB283_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB283_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB283_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB283_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB283_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB283_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB283_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB283_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB283_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_p010le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB283_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB283_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_p010le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB283_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB283_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs1, %rs2};
$L__BB283_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_p010le
.visible .entry Subsample_Lanczos_p016le_p010le(
	.param .u64 Subsample_Lanczos_p016le_p010le_param_0,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_1,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_2,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_3,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_4,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_5,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_6,
	.param .u64 Subsample_Lanczos_p016le_p010le_param_7,
	.param .u32 Subsample_Lanczos_p016le_p010le_param_8,
	.param .u32 Subsample_Lanczos_p016le_p010le_param_9,
	.param .u32 Subsample_Lanczos_p016le_p010le_param_10,
	.param .u32 Subsample_Lanczos_p016le_p010le_param_11,
	.param .u32 Subsample_Lanczos_p016le_p010le_param_12,
	.param .f32 Subsample_Lanczos_p016le_p010le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB284_18;
	bra.uni 	$L__BB284_1;
$L__BB284_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_p010le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB284_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB284_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB284_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB284_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB284_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB284_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB284_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB284_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB284_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB284_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB284_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB284_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_p010le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB284_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB284_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_p010le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB284_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB284_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB284_18:
	ret;

}
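//
// The p016le -> p010le entry above uses the same 65535.0 rescale as the
// p010le path but masks the truncated value with 0xFFC0 (and.b16 -64) so
// that only the top 10 bits survive, matching the P010 convention of
// keeping the significant bits in the MSBs of each 16-bit sample.
//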
	// .globl	Subsample_Lanczos_p016le_p010le_uv
.visible .entry Subsample_Lanczos_p016le_p010le_uv(
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_0,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_1,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_2,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_3,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_4,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_5,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_6,
	.param .u64 Subsample_Lanczos_p016le_p010le_uv_param_7,
	.param .u32 Subsample_Lanczos_p016le_p010le_uv_param_8,
	.param .u32 Subsample_Lanczos_p016le_p010le_uv_param_9,
	.param .u32 Subsample_Lanczos_p016le_p010le_uv_param_10,
	.param .u32 Subsample_Lanczos_p016le_p010le_uv_param_11,
	.param .u32 Subsample_Lanczos_p016le_p010le_uv_param_12,
	.param .f32 Subsample_Lanczos_p016le_p010le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB285_18;
	bra.uni 	$L__BB285_1;
$L__BB285_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_p010le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB285_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB285_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB285_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB285_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB285_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB285_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB285_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB285_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB285_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB285_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB285_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB285_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_p010le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB285_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB285_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_p010le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_p010le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB285_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB285_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	and.b16  	%rs3, %rs1, -64;
	and.b16  	%rs4, %rs2, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs3, %rs4};
$L__BB285_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_p010le
.visible .entry Subsample_Lanczos_yuv444p16le_p010le(
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_p010le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;
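	// Annotation (added for readability; not compiler-generated):
	// One luma output sample per thread. Threads outside the param_8 x param_9
	// bounds return immediately. The source position is (dst + 0.5) times the
	// param_11/param_8 (x) or param_12/param_9 (y) ratio, minus 0.5, split into
	// floor + fraction. Four weights per axis, w(t) = sin(pi*t)*sin(pi*t/2)
	// / ((pi*t)^2 / 2) with w = 1 at t = 0 (a 2-lobe Lanczos window), are
	// normalized by their sum; 16 texture taps from param_0 are accumulated,
	// scaled by 65535.0 (0f477FFF00), truncated to u16 and masked with 0xFFC0
	// so the 10 significant bits sit in the high bits, as p010le expects.
	// The sample is stored ((param_10/2)*y + x) 16-bit elements into param_4.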

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_p010le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_p010le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB286_18;
	bra.uni 	$L__BB286_1;
$L__BB286_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_p010le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_p010le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB286_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB286_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB286_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB286_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB286_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB286_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB286_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB286_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB286_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB286_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB286_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB286_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p16le_p010le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB286_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB286_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_p010le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_p010le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB286_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB286_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	and.b16  	%rs2, %rs1, -64;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB286_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_p010le_uv
.visible .entry Subsample_Lanczos_yuv444p16le_p010le_uv(
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_p010le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_p010le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_p010le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;
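	// Annotation (added for readability; not compiler-generated):
	// Chroma variant of the kernel above: the same Lanczos-2 weights are applied
	// to two source textures (param_1 and param_2), each 4x4 tap sum is scaled
	// by 65535.0 and masked with 0xFFC0, and the two 10-in-16-bit results are
	// stored as one interleaved v2.u16 pair, ((param_10/4)*y + x) pairs into
	// the param_5 buffer.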

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB287_34;
	bra.uni 	$L__BB287_1;
$L__BB287_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB287_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB287_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB287_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB287_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB287_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB287_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB287_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB287_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB287_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB287_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB287_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB287_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB287_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB287_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB287_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB287_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB287_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB287_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB287_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB287_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB287_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB287_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB287_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB287_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB287_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB287_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB287_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB287_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB287_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB287_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv444p16le_p010le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB287_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB287_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f319;
	and.b16  	%rs2, %rs1, -64;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs3, %f370;
	and.b16  	%rs4, %rs3, -64;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs2, %rs4};
$L__BB287_34:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_p016le
.visible .entry Subsample_Lanczos_yuv420p_p016le(
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_p016le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;
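	// Annotation (added for readability; not compiler-generated):
	// Same 4x4 Lanczos-2 resampling as above, but for an 8-bit source and a
	// 16-bit destination: the filtered value is scaled by 255.0 (0f437F0000),
	// truncated to u16 and multiplied by 257 to replicate the byte into both
	// halves of the 16-bit p016le sample, then stored ((param_10/2)*y + x)
	// 16-bit elements into the param_4 buffer.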

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB288_18;
	bra.uni 	$L__BB288_1;
$L__BB288_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_p016le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB288_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB288_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB288_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB288_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB288_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB288_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB288_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB288_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB288_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB288_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB288_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB288_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv420p_p016le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB288_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB288_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_p016le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB288_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB288_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB288_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_p016le_uv
.visible .entry Subsample_Lanczos_yuv420p_p016le_uv(
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_p016le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_p016le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_p016le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;
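	// Annotation (added for readability; not compiler-generated):
	// Chroma variant: two source textures (param_1 and param_2) are filtered
	// with the same Lanczos-2 weights, each result is scaled by 255.0 and
	// multiplied by 257 to widen it to 16 bits, and the pair is stored as an
	// interleaved v2.u16, ((param_10/4)*y + x) pairs into the param_5 buffer.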

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB289_34;
	bra.uni 	$L__BB289_1;
$L__BB289_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_p016le_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB289_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB289_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB289_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB289_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB289_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB289_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB289_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB289_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB289_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB289_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB289_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB289_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB289_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB289_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv420p_p016le_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB289_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB289_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB289_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB289_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB289_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB289_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB289_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB289_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB289_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB289_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB289_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB289_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB289_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB289_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_p016le_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB289_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB289_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_p016le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv420p_p016le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB289_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB289_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f319;
	mul.lo.s16 	%rs2, %rs1, 257;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f370;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs2, %rs4};
$L__BB289_34:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_p016le
.visible .entry Subsample_Lanczos_nv12_p016le(
	.param .u64 Subsample_Lanczos_nv12_p016le_param_0,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_1,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_2,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_3,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_4,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_5,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_6,
	.param .u64 Subsample_Lanczos_nv12_p016le_param_7,
	.param .u32 Subsample_Lanczos_nv12_p016le_param_8,
	.param .u32 Subsample_Lanczos_nv12_p016le_param_9,
	.param .u32 Subsample_Lanczos_nv12_p016le_param_10,
	.param .u32 Subsample_Lanczos_nv12_p016le_param_11,
	.param .u32 Subsample_Lanczos_nv12_p016le_param_12,
	.param .f32 Subsample_Lanczos_nv12_p016le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;
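	// Annotation (added for readability; not compiler-generated):
	// Luma path for an nv12 source; structurally identical to the
	// Subsample_Lanczos_yuv420p_p016le kernel above (8-bit luma in, Lanczos-2
	// filtered, scaled by 255.0, widened with *257, stored as 16-bit p016le).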

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB290_18;
	bra.uni 	$L__BB290_1;
$L__BB290_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_p016le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB290_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB290_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB290_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB290_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB290_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB290_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB290_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB290_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB290_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB290_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB290_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB290_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_p016le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB290_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB290_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_p016le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB290_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB290_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB290_18:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_p016le_uv
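	// NOTE (added annotation, not compiler output): this entry appears to be a
	// Lanczos a=2 (4x4-tap) resampler for an interleaved chroma (UV) plane,
	// nv12 -> p016le; the naming is consistent with FFmpeg's scale_cuda kernels,
	// though that origin is inferred, not stated in this file. As used in the body
	// below, params 8/9 act as the destination plane width/height, 11/12 as the
	// source plane width/height, param 10 as the destination pitch in bytes,
	// param 1 as the source texture handle and param 5 as the destination pointer.
	// One thread writes one 16-bit UV pair.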
.visible .entry Subsample_Lanczos_nv12_p016le_uv(
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_0,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_1,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_2,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_3,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_4,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_5,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_6,
	.param .u64 Subsample_Lanczos_nv12_p016le_uv_param_7,
	.param .u32 Subsample_Lanczos_nv12_p016le_uv_param_8,
	.param .u32 Subsample_Lanczos_nv12_p016le_uv_param_9,
	.param .u32 Subsample_Lanczos_nv12_p016le_uv_param_10,
	.param .u32 Subsample_Lanczos_nv12_p016le_uv_param_11,
	.param .u32 Subsample_Lanczos_nv12_p016le_uv_param_12,
	.param .f32 Subsample_Lanczos_nv12_p016le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB291_18;
	bra.uni 	$L__BB291_1;
$L__BB291_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_p016le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
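	// NOTE: %f53 is the source-space x coordinate, src_x = (x + 0.5) * hscale - 0.5,
	// with hscale = param_11 / param_8 (0f3F000000 = 0.5, 0fBF000000 = -0.5).
	// The same mapping is applied to y at %f56 below; floor(src_x) and the
	// fractional remainder drive the filter weights.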
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
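	// NOTE: the eight guarded blocks that follow (ending at labels $L__BB291_3
	// through $L__BB291_17) all evaluate the Lanczos a=2 weight for a tap
	// distance d (d = t+1, t, t-1, t-2 per axis, t = fractional source offset):
	// with X = pi*d (0f40490FDB = pi), weight = sin(X)*sin(X/2) / (X^2/2),
	// i.e. sinc(d)*sinc(d/2). When X == 0 the block is skipped and the preloaded
	// weight of 1.0 (0f3F800000) is kept, avoiding the 0/0 case.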
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB291_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB291_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB291_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB291_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB291_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB291_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB291_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB291_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB291_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB291_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB291_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB291_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_p016le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB291_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB291_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_p016le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB291_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB291_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
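	// NOTE: the horizontal weights %f223..%f226 and vertical weights %f227..%f230
	// are each normalized by their own sum, so each 4-tap row/column of the filter
	// has unit gain.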
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
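	// NOTE: %f103/%f104 are the (x-1, y-1) corner of the 4x4 tap grid around
	// (floor(src_x), floor(src_y)). Each tex.2d fetch below carries the two
	// interleaved chroma channels in its .x/.y components (%r17/%r18, ...), and
	// both channels are filtered in lockstep, which is why the mul/fma chains
	// appear in U/V pairs.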
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f437F0000;
	mul.f32 	%f222, %f220, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	mul.lo.s16 	%rs3, %rs1, 257;
	mul.lo.s16 	%rs4, %rs2, 257;
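	// NOTE: the filtered U and V values (normalized floats) are scaled by 255
	// (0f437F0000), truncated to integers, and multiplied by 257 so an 8-bit
	// sample v becomes the full-range 16-bit value (v << 8) | v expected by
	// p016le. The address math below is dst + (y * (pitch_bytes >> 2) + x) * 4,
	// i.e. 4 bytes per interleaved UV pair.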
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs3, %rs4};
$L__BB291_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_p016le
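	// NOTE (added annotation): same Lanczos-2 pipeline as above, but for a single
	// plane: an 8-bit yuv444p source plane (texture in param 0) resampled into a
	// 16-bit p016le plane (pointer in param 4). Only the .x component of each
	// texture fetch is used, and one u16 is stored per thread at
	// dst + (y * (pitch_bytes >> 1) + x) * 2.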
.visible .entry Subsample_Lanczos_yuv444p_p016le(
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_p016le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB292_18;
	bra.uni 	$L__BB292_1;
$L__BB292_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_p016le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB292_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB292_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB292_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB292_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB292_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB292_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB292_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB292_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB292_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB292_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB292_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB292_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p_p016le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB292_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB292_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_p016le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB292_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB292_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB292_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_p016le_uv
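	// NOTE (added annotation): chroma variant for planar yuv444p input. U and V
	// live in two separate 8-bit planes (textures in params 1 and 2), so the full
	// 4x4 tap pattern is issued twice, and the eight Lanczos weights are evaluated
	// twice (%f371..%f378 and %f379..%f386), apparently rematerialized around the
	// first plane's sixteen texture fetches. Both filtered results are expanded to
	// 16 bits (x257) and stored as one interleaved UV pair in the p016le chroma
	// plane (pointer in param 5).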
.visible .entry Subsample_Lanczos_yuv444p_p016le_uv(
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_p016le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_p016le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_p016le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB293_34;
	bra.uni 	$L__BB293_1;
$L__BB293_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_p016le_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB293_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB293_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB293_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB293_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB293_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB293_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB293_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB293_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB293_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB293_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB293_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB293_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB293_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB293_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv444p_p016le_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB293_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB293_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB293_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB293_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB293_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB293_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB293_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB293_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB293_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB293_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB293_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB293_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB293_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB293_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_p016le_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB293_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB293_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_p016le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv444p_p016le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB293_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB293_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f319;
	mul.lo.s16 	%rs2, %rs1, 257;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f370;
	mul.lo.s16 	%rs4, %rs3, 257;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs2, %rs4};
$L__BB293_34:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_p016le
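	// NOTE (added annotation): single-plane kernel for a 10-bit p010le source.
	// The weight pipeline is identical to the 8-bit variants above; only the final
	// conversion at the store differs (see the note there).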
.visible .entry Subsample_Lanczos_p010le_p016le(
	.param .u64 Subsample_Lanczos_p010le_p016le_param_0,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_1,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_2,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_3,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_4,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_5,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_6,
	.param .u64 Subsample_Lanczos_p010le_p016le_param_7,
	.param .u32 Subsample_Lanczos_p010le_p016le_param_8,
	.param .u32 Subsample_Lanczos_p010le_p016le_param_9,
	.param .u32 Subsample_Lanczos_p010le_p016le_param_10,
	.param .u32 Subsample_Lanczos_p010le_p016le_param_11,
	.param .u32 Subsample_Lanczos_p010le_p016le_param_12,
	.param .f32 Subsample_Lanczos_p010le_p016le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB294_18;
	bra.uni 	$L__BB294_1;
$L__BB294_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_p016le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB294_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB294_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB294_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB294_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB294_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB294_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB294_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB294_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB294_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB294_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB294_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB294_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_p016le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB294_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB294_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_p016le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB294_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB294_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
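	// NOTE: p010le samples arrive as normalized floats, so the filtered result is
	// scaled by 65535 (0f477FFF00) and truncated; or-ing in (v >> 10) then
	// replicates the high bits into the 6 low bits that p010 leaves zero,
	// approximating a full-range p016le value.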
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB294_18:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_p016le_uv
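	// NOTE (added annotation): interleaved-chroma version of the p010le -> p016le
	// kernel. Both channels of each fetch are filtered, scaled by 65535,
	// bit-replicated (v | v >> 10) and stored as one v2.u16 pair at
	// dst + (y * (pitch_bytes >> 2) + x) * 4.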
.visible .entry Subsample_Lanczos_p010le_p016le_uv(
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_0,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_1,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_2,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_3,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_4,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_5,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_6,
	.param .u64 Subsample_Lanczos_p010le_p016le_uv_param_7,
	.param .u32 Subsample_Lanczos_p010le_p016le_uv_param_8,
	.param .u32 Subsample_Lanczos_p010le_p016le_uv_param_9,
	.param .u32 Subsample_Lanczos_p010le_p016le_uv_param_10,
	.param .u32 Subsample_Lanczos_p010le_p016le_uv_param_11,
	.param .u32 Subsample_Lanczos_p010le_p016le_uv_param_12,
	.param .f32 Subsample_Lanczos_p010le_p016le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB295_18;
	bra.uni 	$L__BB295_1;
$L__BB295_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_p016le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB295_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB295_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB295_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB295_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB295_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB295_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB295_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB295_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB295_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB295_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB295_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB295_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_p016le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB295_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB295_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_p016le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB295_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB295_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs4, %rs6};
$L__BB295_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_p016le
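	// NOTE (added annotation): 16-bit p016le -> p016le single-plane variant. The
	// weight pipeline below follows the same pattern as the kernels above; param 4
	// is loaded here as in the other single-plane kernels, where it serves as the
	// destination pointer. (The remainder of this kernel body lies beyond this
	// portion of the dump.)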
.visible .entry Subsample_Lanczos_p016le_p016le(
	.param .u64 Subsample_Lanczos_p016le_p016le_param_0,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_1,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_2,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_3,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_4,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_5,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_6,
	.param .u64 Subsample_Lanczos_p016le_p016le_param_7,
	.param .u32 Subsample_Lanczos_p016le_p016le_param_8,
	.param .u32 Subsample_Lanczos_p016le_p016le_param_9,
	.param .u32 Subsample_Lanczos_p016le_p016le_param_10,
	.param .u32 Subsample_Lanczos_p016le_p016le_param_11,
	.param .u32 Subsample_Lanczos_p016le_p016le_param_12,
	.param .f32 Subsample_Lanczos_p016le_p016le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
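	// Compute the global output pixel (x = %r1, y = %r2) from block/thread indices and
	// exit early when it falls outside the destination size given by params 8/9.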
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB296_18;
	bra.uni 	$L__BB296_1;
$L__BB296_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_p016le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
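	// Map the output pixel to a source coordinate (scaled by the src/dst ratios, offset
	// by -0.5), split it into an integer tap base (floor) and a fractional part, then
	// evaluate the four horizontal Lanczos-2 weights. Each weight appears to be
	// sinc(x)*sinc(x/2), computed as sin(pi*x)*sin(pi*x/2) / ((pi*x)^2 / 2), with the
	// guarded branches keeping the weight at 1.0 when pi*x == 0.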
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB296_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB296_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB296_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB296_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB296_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB296_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB296_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB296_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB296_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB296_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB296_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB296_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_p016le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB296_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB296_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_p016le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB296_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB296_17:
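	// Normalize the four horizontal (%f138-%f141) and four vertical (%f145-%f148)
	// weights by their respective sums so each set of filter taps sums to 1.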
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
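	// Sample the 4x4 source neighborhood around the tap base via tex.2d and accumulate
	// a separable weighted sum: four horizontal FMAs per row, with the four row results
	// combined by the vertical weights further below.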
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
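	// Scale the normalized result to 16-bit range (0f477FFF00 ~ 65535.0), truncate to
	// u16, and store it at dst + y*(pitch/2) + x 16-bit elements; param_10 appears to be
	// the destination pitch in bytes, hence the shr.u64 by 1 and the shl.b64 by 1.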
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB296_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_p016le_uv
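	// The entry below appears to be the interleaved-chroma (UV) variant of the same
	// Lanczos-2 p016le path: each tex.2d tap yields two components, both are filtered
	// with the same weights, and the pair is written with st.global.v2.u16; the pitch
	// is shifted right by 2 since each interleaved UV pair occupies 4 bytes.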
.visible .entry Subsample_Lanczos_p016le_p016le_uv(
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_0,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_1,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_2,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_3,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_4,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_5,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_6,
	.param .u64 Subsample_Lanczos_p016le_p016le_uv_param_7,
	.param .u32 Subsample_Lanczos_p016le_p016le_uv_param_8,
	.param .u32 Subsample_Lanczos_p016le_p016le_uv_param_9,
	.param .u32 Subsample_Lanczos_p016le_p016le_uv_param_10,
	.param .u32 Subsample_Lanczos_p016le_p016le_uv_param_11,
	.param .u32 Subsample_Lanczos_p016le_p016le_uv_param_12,
	.param .f32 Subsample_Lanczos_p016le_p016le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB297_18;
	bra.uni 	$L__BB297_1;
$L__BB297_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_p016le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB297_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB297_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB297_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB297_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB297_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB297_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB297_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB297_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB297_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB297_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB297_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB297_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_p016le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB297_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB297_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_p016le_uv_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_p016le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB297_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB297_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 2;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 2;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.v2.u16 	[%rd27], {%rs1, %rs2};
$L__BB297_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_p016le
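	// Likely the yuv444p16le -> p016le luma kernel; the body matches the p016le luma
	// path above, since both read a single 16-bit plane through a texture object.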
.visible .entry Subsample_Lanczos_yuv444p16le_p016le(
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_p016le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_p016le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_p016le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB298_18;
	bra.uni 	$L__BB298_1;
$L__BB298_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_p016le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_p016le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB298_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB298_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB298_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB298_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB298_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB298_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB298_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB298_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB298_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB298_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB298_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB298_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p16le_p016le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB298_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB298_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_p016le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_p016le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB298_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB298_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB298_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_p016le_uv
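	// Likely the yuv444p16le -> p016le chroma kernel. Unlike the interleaved p016le UV
	// case, U and V appear to come from two separate planes (textures in params 1 and 2),
	// so the 4x4 taps are fetched twice and the sinc weight evaluations are duplicated
	// before the packed st.global.v2.u16 store.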
.visible .entry Subsample_Lanczos_yuv444p16le_p016le_uv(
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_p016le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_p016le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_p016le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<387>;
	.reg .b64 	%rd<45>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB299_34;
	bra.uni 	$L__BB299_1;
$L__BB299_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_11];
	cvt.rn.f32.s32 	%f131, %r6;
	cvt.rn.f32.s32 	%f132, %r3;
	div.rn.f32 	%f133, %f131, %f132;
	cvt.rn.f32.s32 	%f134, %r7;
	cvt.rn.f32.s32 	%f135, %r4;
	div.rn.f32 	%f136, %f134, %f135;
	cvt.rn.f32.s32 	%f137, %r1;
	add.f32 	%f138, %f137, 0f3F000000;
	fma.rn.f32 	%f139, %f133, %f138, 0fBF000000;
	cvt.rn.f32.s32 	%f140, %r2;
	add.f32 	%f141, %f140, 0f3F000000;
	cvt.rmi.f32.f32 	%f255, %f139;
	sub.f32 	%f143, %f139, %f255;
	add.f32 	%f144, %f143, 0f3F800000;
	mul.f32 	%f4, %f144, 0f40490FDB;
	mul.f32 	%f5, %f143, 0f40490FDB;
	add.f32 	%f145, %f143, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f147, %f4, %f4;
	mul.f32 	%f9, %f147, 0f3F000000;
	mov.f32 	%f386, 0f3F800000;
	mov.f32 	%f371, %f386;
	@%p4 bra 	$L__BB299_3;
	sin.approx.f32 	%f148, %f4;
	sin.approx.f32 	%f149, %f8;
	mul.f32 	%f150, %f148, %f149;
	div.rn.f32 	%f371, %f150, %f9;
$L__BB299_3:
	fma.rn.f32 	%f142, %f136, %f141, 0fBF000000;
	add.f32 	%f146, %f143, 0fC0000000;
	mul.f32 	%f6, %f145, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f152, %f5, %f5;
	mul.f32 	%f13, %f152, 0f3F000000;
	mov.f32 	%f372, %f386;
	@%p5 bra 	$L__BB299_5;
	sin.approx.f32 	%f153, %f5;
	sin.approx.f32 	%f154, %f12;
	mul.f32 	%f155, %f153, %f154;
	div.rn.f32 	%f372, %f155, %f13;
$L__BB299_5:
	cvt.rmi.f32.f32 	%f262, %f142;
	mul.f32 	%f7, %f146, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f157, %f6, %f6;
	mul.f32 	%f17, %f157, 0f3F000000;
	mov.f32 	%f373, %f386;
	@%p6 bra 	$L__BB299_7;
	sin.approx.f32 	%f158, %f6;
	sin.approx.f32 	%f159, %f16;
	mul.f32 	%f160, %f158, %f159;
	div.rn.f32 	%f373, %f160, %f17;
$L__BB299_7:
	sub.f32 	%f3, %f142, %f262;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f162, %f7, %f7;
	mul.f32 	%f21, %f162, 0f3F000000;
	mov.f32 	%f374, %f386;
	@%p7 bra 	$L__BB299_9;
	sin.approx.f32 	%f163, %f7;
	sin.approx.f32 	%f164, %f20;
	mul.f32 	%f165, %f163, %f164;
	div.rn.f32 	%f374, %f165, %f21;
$L__BB299_9:
	add.f32 	%f167, %f3, 0f3F800000;
	mul.f32 	%f24, %f167, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f168, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f170, %f24, %f24;
	mul.f32 	%f29, %f170, 0f3F000000;
	mov.f32 	%f375, %f386;
	@%p8 bra 	$L__BB299_11;
	sin.approx.f32 	%f171, %f24;
	sin.approx.f32 	%f172, %f28;
	mul.f32 	%f173, %f171, %f172;
	div.rn.f32 	%f375, %f173, %f29;
$L__BB299_11:
	add.f32 	%f169, %f3, 0fC0000000;
	mul.f32 	%f26, %f168, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f175, %f25, %f25;
	mul.f32 	%f33, %f175, 0f3F000000;
	mov.f32 	%f376, %f386;
	@%p9 bra 	$L__BB299_13;
	sin.approx.f32 	%f176, %f25;
	sin.approx.f32 	%f177, %f32;
	mul.f32 	%f178, %f176, %f177;
	div.rn.f32 	%f376, %f178, %f33;
$L__BB299_13:
	mul.f32 	%f27, %f169, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f180, %f26, %f26;
	mul.f32 	%f37, %f180, 0f3F000000;
	mov.f32 	%f377, %f386;
	@%p10 bra 	$L__BB299_15;
	sin.approx.f32 	%f181, %f26;
	sin.approx.f32 	%f182, %f36;
	mul.f32 	%f183, %f181, %f182;
	div.rn.f32 	%f377, %f183, %f37;
$L__BB299_15:
	ld.param.u64 	%rd5, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_1];
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f185, %f27, %f27;
	mul.f32 	%f41, %f185, 0f3F000000;
	mov.f32 	%f378, %f386;
	@%p11 bra 	$L__BB299_17;
	sin.approx.f32 	%f186, %f27;
	sin.approx.f32 	%f187, %f40;
	mul.f32 	%f188, %f186, %f187;
	div.rn.f32 	%f378, %f188, %f41;
$L__BB299_17:
	add.f32 	%f253, %f255, 0fBF800000;
	add.f32 	%f254, %f262, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd5, {%f253, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd5, {%f255, %f254}];
	// end inline asm
	add.f32 	%f257, %f255, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd5, {%f257, %f254}];
	// end inline asm
	add.f32 	%f259, %f255, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd5, {%f259, %f254}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd5, {%f253, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd5, {%f255, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd5, {%f257, %f262}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd5, {%f259, %f262}];
	// end inline asm
	add.f32 	%f270, %f262, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd5, {%f253, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd5, {%f255, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd5, {%f257, %f270}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd5, {%f259, %f270}];
	// end inline asm
	add.f32 	%f278, %f262, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd5, {%f253, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd5, {%f255, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd5, {%f257, %f278}];
	// end inline asm
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd5, {%f259, %f278}];
	// end inline asm
	mov.f32 	%f379, %f386;
	@%p4 bra 	$L__BB299_19;
	sin.approx.f32 	%f222, %f4;
	sin.approx.f32 	%f223, %f8;
	mul.f32 	%f224, %f222, %f223;
	div.rn.f32 	%f379, %f224, %f9;
$L__BB299_19:
	mov.f32 	%f380, %f386;
	@%p5 bra 	$L__BB299_21;
	sin.approx.f32 	%f226, %f5;
	sin.approx.f32 	%f227, %f12;
	mul.f32 	%f228, %f226, %f227;
	div.rn.f32 	%f380, %f228, %f13;
$L__BB299_21:
	mov.f32 	%f381, %f386;
	@%p6 bra 	$L__BB299_23;
	sin.approx.f32 	%f230, %f6;
	sin.approx.f32 	%f231, %f16;
	mul.f32 	%f232, %f230, %f231;
	div.rn.f32 	%f381, %f232, %f17;
$L__BB299_23:
	mov.f32 	%f382, %f386;
	@%p7 bra 	$L__BB299_25;
	sin.approx.f32 	%f234, %f7;
	sin.approx.f32 	%f235, %f20;
	mul.f32 	%f236, %f234, %f235;
	div.rn.f32 	%f382, %f236, %f21;
$L__BB299_25:
	mov.f32 	%f383, %f386;
	@%p8 bra 	$L__BB299_27;
	sin.approx.f32 	%f238, %f24;
	sin.approx.f32 	%f239, %f28;
	mul.f32 	%f240, %f238, %f239;
	div.rn.f32 	%f383, %f240, %f29;
$L__BB299_27:
	mov.f32 	%f384, %f386;
	@%p9 bra 	$L__BB299_29;
	sin.approx.f32 	%f242, %f25;
	sin.approx.f32 	%f243, %f32;
	mul.f32 	%f244, %f242, %f243;
	div.rn.f32 	%f384, %f244, %f33;
$L__BB299_29:
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_5];
	mov.f32 	%f385, %f386;
	@%p10 bra 	$L__BB299_31;
	sin.approx.f32 	%f246, %f26;
	sin.approx.f32 	%f247, %f36;
	mul.f32 	%f248, %f246, %f247;
	div.rn.f32 	%f385, %f248, %f37;
$L__BB299_31:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_10];
	ld.param.u64 	%rd21, [Subsample_Lanczos_yuv444p16le_p016le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd4;
	mov.b32 	%f46, %r17;
	mov.b32 	%f50, %r21;
	mov.b32 	%f55, %r25;
	mov.b32 	%f60, %r29;
	mov.b32 	%f64, %r33;
	mov.b32 	%f68, %r37;
	mov.b32 	%f72, %r41;
	mov.b32 	%f76, %r45;
	mov.b32 	%f81, %r49;
	mov.b32 	%f85, %r53;
	mov.b32 	%f89, %r57;
	mov.b32 	%f93, %r61;
	mov.b32 	%f98, %r65;
	mov.b32 	%f102, %r69;
	mov.b32 	%f106, %r73;
	mov.b32 	%f110, %r77;
	@%p11 bra 	$L__BB299_33;
	sin.approx.f32 	%f250, %f27;
	sin.approx.f32 	%f251, %f40;
	mul.f32 	%f252, %f250, %f251;
	div.rn.f32 	%f386, %f252, %f41;
$L__BB299_33:
	add.f32 	%f285, %f375, %f376;
	add.f32 	%f286, %f285, %f377;
	add.f32 	%f287, %f286, %f378;
	div.rn.f32 	%f288, %f375, %f287;
	add.f32 	%f289, %f371, %f372;
	add.f32 	%f290, %f289, %f373;
	add.f32 	%f291, %f290, %f374;
	div.rn.f32 	%f292, %f371, %f291;
	div.rn.f32 	%f293, %f372, %f291;
	mul.f32 	%f294, %f293, %f50;
	fma.rn.f32 	%f295, %f292, %f46, %f294;
	div.rn.f32 	%f296, %f373, %f291;
	fma.rn.f32 	%f297, %f296, %f55, %f295;
	div.rn.f32 	%f298, %f374, %f291;
	fma.rn.f32 	%f299, %f298, %f60, %f297;
	div.rn.f32 	%f300, %f376, %f287;
	mul.f32 	%f301, %f293, %f68;
	fma.rn.f32 	%f302, %f292, %f64, %f301;
	fma.rn.f32 	%f303, %f296, %f72, %f302;
	fma.rn.f32 	%f304, %f298, %f76, %f303;
	mul.f32 	%f305, %f300, %f304;
	fma.rn.f32 	%f306, %f288, %f299, %f305;
	div.rn.f32 	%f307, %f377, %f287;
	mul.f32 	%f308, %f293, %f85;
	fma.rn.f32 	%f309, %f292, %f81, %f308;
	fma.rn.f32 	%f310, %f296, %f89, %f309;
	fma.rn.f32 	%f311, %f298, %f93, %f310;
	fma.rn.f32 	%f312, %f307, %f311, %f306;
	div.rn.f32 	%f313, %f378, %f287;
	mul.f32 	%f314, %f293, %f102;
	fma.rn.f32 	%f315, %f292, %f98, %f314;
	fma.rn.f32 	%f316, %f296, %f106, %f315;
	fma.rn.f32 	%f317, %f298, %f110, %f316;
	fma.rn.f32 	%f318, %f313, %f317, %f312;
	mul.f32 	%f319, %f318, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f319;
	add.f32 	%f320, %f379, %f380;
	add.f32 	%f321, %f320, %f381;
	add.f32 	%f322, %f321, %f382;
	div.rn.f32 	%f323, %f382, %f322;
	div.rn.f32 	%f324, %f381, %f322;
	div.rn.f32 	%f325, %f380, %f322;
	div.rn.f32 	%f326, %f379, %f322;
	add.f32 	%f327, %f383, %f384;
	add.f32 	%f328, %f327, %f385;
	add.f32 	%f329, %f328, %f386;
	div.rn.f32 	%f330, %f383, %f329;
	div.rn.f32 	%f331, %f384, %f329;
	div.rn.f32 	%f332, %f385, %f329;
	div.rn.f32 	%f333, %f386, %f329;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd21, {%f253, %f254}];
	// end inline asm
	mov.b32 	%f334, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd21, {%f255, %f254}];
	// end inline asm
	mov.b32 	%f335, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd21, {%f257, %f254}];
	// end inline asm
	mov.b32 	%f336, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd21, {%f259, %f254}];
	// end inline asm
	mov.b32 	%f337, %r93;
	mul.f32 	%f338, %f325, %f335;
	fma.rn.f32 	%f339, %f326, %f334, %f338;
	fma.rn.f32 	%f340, %f324, %f336, %f339;
	fma.rn.f32 	%f341, %f323, %f337, %f340;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd21, {%f253, %f262}];
	// end inline asm
	mov.b32 	%f342, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd21, {%f255, %f262}];
	// end inline asm
	mov.b32 	%f343, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd21, {%f257, %f262}];
	// end inline asm
	mov.b32 	%f344, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd21, {%f259, %f262}];
	// end inline asm
	mov.b32 	%f345, %r109;
	mul.f32 	%f346, %f325, %f343;
	fma.rn.f32 	%f347, %f326, %f342, %f346;
	fma.rn.f32 	%f348, %f324, %f344, %f347;
	fma.rn.f32 	%f349, %f323, %f345, %f348;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd21, {%f253, %f270}];
	// end inline asm
	mov.b32 	%f350, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd21, {%f255, %f270}];
	// end inline asm
	mov.b32 	%f351, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd21, {%f257, %f270}];
	// end inline asm
	mov.b32 	%f352, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd21, {%f259, %f270}];
	// end inline asm
	mov.b32 	%f353, %r125;
	mul.f32 	%f354, %f325, %f351;
	fma.rn.f32 	%f355, %f326, %f350, %f354;
	fma.rn.f32 	%f356, %f324, %f352, %f355;
	fma.rn.f32 	%f357, %f323, %f353, %f356;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd21, {%f253, %f278}];
	// end inline asm
	mov.b32 	%f358, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd21, {%f255, %f278}];
	// end inline asm
	mov.b32 	%f359, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd21, {%f257, %f278}];
	// end inline asm
	mov.b32 	%f360, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd21, {%f259, %f278}];
	// end inline asm
	mov.b32 	%f361, %r141;
	mul.f32 	%f362, %f325, %f359;
	fma.rn.f32 	%f363, %f326, %f358, %f362;
	fma.rn.f32 	%f364, %f324, %f360, %f363;
	fma.rn.f32 	%f365, %f323, %f361, %f364;
	mul.f32 	%f366, %f331, %f349;
	fma.rn.f32 	%f367, %f330, %f341, %f366;
	fma.rn.f32 	%f368, %f332, %f357, %f367;
	fma.rn.f32 	%f369, %f333, %f365, %f368;
	mul.f32 	%f370, %f369, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs2, %f370;
	cvt.s64.s32 	%rd37, %r2;
	cvt.s64.s32 	%rd38, %r5;
	shr.u64 	%rd39, %rd38, 2;
	mul.lo.s64 	%rd40, %rd39, %rd37;
	cvt.s64.s32 	%rd41, %r1;
	add.s64 	%rd42, %rd40, %rd41;
	shl.b64 	%rd43, %rd42, 2;
	add.s64 	%rd44, %rd1, %rd43;
	st.global.v2.u16 	[%rd44], {%rs1, %rs2};
$L__BB299_34:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_yuv444p16le
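	// Likely the yuv420p -> yuv444p16le luma kernel: the filtered value is scaled by
	// 255.0 (0f437F0000), truncated to an 8-bit value, then multiplied by 257 to expand
	// it to full 16-bit range before the u16 store.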
.visible .entry Subsample_Lanczos_yuv420p_yuv444p16le(
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB300_18;
	bra.uni 	$L__BB300_1;
$L__BB300_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_yuv444p16le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB300_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB300_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB300_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB300_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB300_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB300_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB300_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB300_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB300_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB300_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB300_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB300_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv420p_yuv444p16le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB300_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB300_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv420p_yuv444p16le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB300_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB300_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB300_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv420p_yuv444p16le_uv
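	// Likely the yuv420p -> yuv444p16le chroma kernel, reading two separate 8-bit
	// chroma planes (textures in params 1 and 2) and, by analogy with the luma path,
	// expanding each filtered sample to 16 bits for its own destination plane.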
.visible .entry Subsample_Lanczos_yuv420p_yuv444p16le_uv(
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<49>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB301_34;
	bra.uni 	$L__BB301_1;
$L__BB301_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB301_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB301_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB301_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB301_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB301_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB301_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB301_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB301_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB301_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB301_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB301_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB301_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB301_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB301_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB301_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB301_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f208;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd24, %r2;
	cvt.s64.s32 	%rd25, %r5;
	shr.u64 	%rd26, %rd25, 1;
	mul.lo.s64 	%rd27, %rd26, %rd24;
	cvt.s64.s32 	%rd28, %r1;
	add.s64 	%rd3, %rd27, %rd28;
	shl.b64 	%rd29, %rd3, 1;
	add.s64 	%rd30, %rd2, %rd29;
	st.global.u16 	[%rd30], %rs2;
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB301_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB301_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB301_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB301_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB301_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB301_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB301_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB301_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB301_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB301_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB301_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB301_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB301_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB301_31:
	ld.param.u64 	%rd31, [Subsample_Lanczos_yuv420p_yuv444p16le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB301_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB301_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd31, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd31, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd31, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd31, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd31, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd31, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd31, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd31, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd31, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd31, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd31, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd31, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd31, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd31, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd31, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd31, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f322;
	mul.lo.s16 	%rs4, %rs3, 257;
	add.s64 	%rd48, %rd1, %rd29;
	st.global.u16 	[%rd48], %rs4;
$L__BB301_34:
	ret;

}
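//
// Note on the Lanczos kernels below: they all share one pattern. Each thread
// maps its destination pixel to a source coordinate via the per-axis ratios
// param_11/param_8 and param_12/param_9 (params 8 and 9 are also the
// bounds-check extents), splits it into an integer part and a fraction, and
// evaluates a 4-tap Lanczos (a = 2) weight for the offsets {-1, 0, +1, +2}
// on each axis. A weight for offset t is
// sin(pi*t) * sin(pi*t/2) / ((pi*t)^2 / 2), with the t == 0 tap
// short-circuited to 1.0 by the setp/bra guards; the four weights per axis
// are then normalized by their sum. The 16 tex.2d fetches cover the 4x4
// neighborhood, which is reduced first along x and then along y. Since the
// source samples here are 8-bit and the destination is yuv444p16le, the
// result is scaled by 255.0 (0f437F0000), truncated to u16 and multiplied by
// 257 to spread the 8-bit value over the 16-bit range; the store address is
// dst + (y * pitch/2 + x) * 2 bytes, with the pitch taken from param_10.
//
// A rough CUDA-level sketch of the weight function this PTX appears to
// implement (hypothetical name, for illustration only, not the original
// source):
//
//   __device__ static float lanczos_weight(float t)   /* a = 2 */
//   {
//       float x = t * 3.14159274f;                    /* pi * t */
//       if (x == 0.0f)
//           return 1.0f;                              /* center tap */
//       return __sinf(x) * __sinf(x * 0.5f)           /* sinc(t)*sinc(t/2) */
//            / (x * x * 0.5f);
//   }
//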
	// .globl	Subsample_Lanczos_nv12_yuv444p16le
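// Single-plane (luma) variant: the 8-bit source is sampled through the
// texture object in param_0 and written as 16-bit samples to the global
// pointer in param_4, following the shared weight/addressing pattern noted
// above.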
.visible .entry Subsample_Lanczos_nv12_yuv444p16le(
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_0,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_1,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_2,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_3,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_4,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_5,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_6,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_param_7,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_param_8,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_param_9,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_param_10,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_param_11,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_param_12,
	.param .f32 Subsample_Lanczos_nv12_yuv444p16le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB302_18;
	bra.uni 	$L__BB302_1;
$L__BB302_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_yuv444p16le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB302_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB302_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB302_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB302_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB302_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB302_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB302_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB302_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB302_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB302_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB302_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB302_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_nv12_yuv444p16le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB302_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB302_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_yuv444p16le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB302_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB302_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB302_18:
	ret;

}
	// .globl	Subsample_Lanczos_nv12_yuv444p16le_uv
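// Chroma variant for the interleaved nv12 UV plane: each texel of the
// param_1 texture carries U in .x and V in .y (e.g. %r17/%r18), so the 4x4
// filter is accumulated twice per pixel and the two results are stored to
// the separate 16-bit planes addressed by params 5 and 6.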
.visible .entry Subsample_Lanczos_nv12_yuv444p16le_uv(
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Lanczos_nv12_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Lanczos_nv12_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Lanczos_nv12_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<31>;

	ld.param.u32 	%r4, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB303_18;
	bra.uni 	$L__BB303_1;
$L__BB303_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB303_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB303_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB303_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB303_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB303_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB303_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB303_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB303_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB303_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB303_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB303_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB303_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB303_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB303_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_nv12_yuv444p16le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB303_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB303_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f437F0000;
	mul.f32 	%f222, %f220, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	mul.lo.s16 	%rs3, %rs1, 257;
	cvt.s64.s32 	%rd22, %r2;
	cvt.s64.s32 	%rd23, %r5;
	shr.u64 	%rd24, %rd23, 1;
	mul.lo.s64 	%rd25, %rd24, %rd22;
	cvt.s64.s32 	%rd26, %r1;
	add.s64 	%rd27, %rd25, %rd26;
	shl.b64 	%rd28, %rd27, 1;
	add.s64 	%rd29, %rd2, %rd28;
	st.global.u16 	[%rd29], %rs3;
	mul.lo.s16 	%rs4, %rs2, 257;
	add.s64 	%rd30, %rd1, %rd28;
	st.global.u16 	[%rd30], %rs4;
$L__BB303_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_yuv444p16le
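// Planar 4:4:4 source, single-plane variant: structurally identical to
// Subsample_Lanczos_nv12_yuv444p16le above (texture in param_0, 16-bit
// output through param_4); only the format pairing in the name differs.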
.visible .entry Subsample_Lanczos_yuv444p_yuv444p16le(
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_yuv444p16le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB304_18;
	bra.uni 	$L__BB304_1;
$L__BB304_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_yuv444p16le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB304_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB304_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB304_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB304_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB304_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB304_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB304_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB304_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB304_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB304_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB304_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB304_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p_yuv444p16le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB304_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB304_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p_yuv444p16le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB304_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB304_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f185;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs2;
$L__BB304_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p_yuv444p16le_uv
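// Chroma variant for planar 4:4:4 input: U and V come from two separate
// textures (params 1 and 2), the weight evaluation is repeated for the
// second plane, and the results are written to the 16-bit planes addressed
// by params 5 and 6.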
.visible .entry Subsample_Lanczos_yuv444p_yuv444p16le_uv(
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<49>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB305_34;
	bra.uni 	$L__BB305_1;
$L__BB305_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB305_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB305_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB305_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB305_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB305_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB305_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB305_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB305_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB305_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB305_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB305_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB305_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB305_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB305_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB305_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB305_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f208;
	mul.lo.s16 	%rs2, %rs1, 257;
	cvt.s64.s32 	%rd24, %r2;
	cvt.s64.s32 	%rd25, %r5;
	shr.u64 	%rd26, %rd25, 1;
	mul.lo.s64 	%rd27, %rd26, %rd24;
	cvt.s64.s32 	%rd28, %r1;
	add.s64 	%rd3, %rd27, %rd28;
	shl.b64 	%rd29, %rd3, 1;
	add.s64 	%rd30, %rd2, %rd29;
	st.global.u16 	[%rd30], %rs2;
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB305_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB305_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB305_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB305_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB305_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB305_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB305_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB305_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB305_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB305_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB305_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB305_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB305_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB305_31:
	ld.param.u64 	%rd31, [Subsample_Lanczos_yuv444p_yuv444p16le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB305_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB305_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd31, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd31, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd31, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd31, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd31, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd31, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd31, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd31, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd31, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd31, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd31, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd31, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd31, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd31, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd31, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd31, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f437F0000;
	cvt.rzi.u16.f32 	%rs3, %f322;
	mul.lo.s16 	%rs4, %rs3, 257;
	add.s64 	%rd48, %rd1, %rd29;
	st.global.u16 	[%rd48], %rs4;
$L__BB305_34:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_yuv444p16le
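// 10-bit p010le source, luma plane: the prologue (bounds check against
// params 8/9, scale factors from params 11/12, per-axis 4-tap Lanczos
// weights) matches the 8-bit kernels above.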
.visible .entry Subsample_Lanczos_p010le_yuv444p16le(
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_0,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_1,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_2,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_3,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_4,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_5,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_6,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_param_7,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_param_8,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_param_9,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_param_10,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_param_11,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_param_12,
	.param .f32 Subsample_Lanczos_p010le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<4>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB306_18;
	bra.uni 	$L__BB306_1;
$L__BB306_1:
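	// Source mapping (hedged reading): the two div.rn results are the src/dst
	// scale factors for x and y; the source position of this output pixel is
	// scale * (coord + 0.5) - 0.5, its floor selects the base tap, and the
	// fractional remainder drives the Lanczos weights computed below.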
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_yuv444p16le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB306_3;
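	// Tap weight: with t = pi * (distance to tap), the block below computes
	// sin(t) * sin(t/2) / (0.5 * t * t), i.e. the Lanczos-2 window
	// sinc(x) * sinc(x/2); the branch above substitutes 1.0 when t == 0.
	// The same sequence repeats for the remaining seven tap distances.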
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB306_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB306_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB306_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB306_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB306_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB306_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB306_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB306_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB306_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB306_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB306_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p010le_yuv444p16le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB306_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB306_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_yuv444p16le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB306_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB306_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
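	// The four horizontal weights and the four vertical weights have each been
	// normalized by their group sum, so every group of four taps adds up to 1.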
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
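	// 4x4 neighborhood: sixteen tex.2d fetches at x = floor-1..floor+2 and
	// y = floor-1..floor+2; each row of four taps is reduced with the
	// normalized horizontal weights into one partial sum.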
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
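	// The four row sums are combined with the vertical weights, scaled to the
	// 16-bit range (0f477FFF00 = 65535.0) and truncated; the (value >> 10) | value
	// step reads as bit replication that expands the MSB-aligned 10-bit p010
	// samples to the full 16-bit range (a hedged interpretation). The store
	// address works out to dst + 2 * (y * (param_10 / 2) + x), treating
	// param_10 as a byte pitch.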
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	shr.u16 	%rs2, %rs1, 10;
	or.b16  	%rs3, %rs2, %rs1;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs3;
$L__BB306_18:
	ret;

}
	// .globl	Subsample_Lanczos_p010le_yuv444p16le_uv
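// Chroma counterpart of the kernel above: each tex.2d fetch of the interleaved
// p010le UV texture yields two components (e.g. %r17/.x and %r18/.y), both of
// which are filtered with the same normalized Lanczos-2 weights, bit-expanded
// from 10 to 16 bits, and stored to the two separate 16-bit planes taken from
// param_5 (.x) and param_6 (.y); which of the two holds U and which holds V is
// not evident from the PTX alone.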
.visible .entry Subsample_Lanczos_p010le_yuv444p16le_uv(
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Lanczos_p010le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Lanczos_p010le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Lanczos_p010le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<7>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<31>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB307_18;
	bra.uni 	$L__BB307_1;
$L__BB307_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB307_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB307_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB307_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB307_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB307_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB307_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB307_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB307_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB307_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB307_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB307_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB307_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB307_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB307_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_p010le_yuv444p16le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB307_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB307_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	shr.u16 	%rs3, %rs1, 10;
	or.b16  	%rs4, %rs3, %rs1;
	cvt.s64.s32 	%rd22, %r2;
	cvt.s64.s32 	%rd23, %r5;
	shr.u64 	%rd24, %rd23, 1;
	mul.lo.s64 	%rd25, %rd24, %rd22;
	cvt.s64.s32 	%rd26, %r1;
	add.s64 	%rd27, %rd25, %rd26;
	shl.b64 	%rd28, %rd27, 1;
	add.s64 	%rd29, %rd2, %rd28;
	st.global.u16 	[%rd29], %rs4;
	shr.u16 	%rs5, %rs2, 10;
	or.b16  	%rs6, %rs5, %rs2;
	add.s64 	%rd30, %rd1, %rd28;
	st.global.u16 	[%rd30], %rs6;
$L__BB307_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_yuv444p16le
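// Same structure as the p010le luma kernel above, but for a full-range 16-bit
// (p016le) source: the truncated 16-bit result is stored as-is, with no
// 10-to-16-bit replication step.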
.visible .entry Subsample_Lanczos_p016le_yuv444p16le(
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_0,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_1,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_2,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_3,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_4,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_5,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_6,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_param_7,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_param_8,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_param_9,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_param_10,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_param_11,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_param_12,
	.param .f32 Subsample_Lanczos_p016le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB308_18;
	bra.uni 	$L__BB308_1;
$L__BB308_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_yuv444p16le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB308_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB308_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB308_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB308_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB308_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB308_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB308_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB308_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB308_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB308_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB308_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB308_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_p016le_yuv444p16le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB308_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB308_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_yuv444p16le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB308_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB308_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB308_18:
	ret;

}
	// .globl	Subsample_Lanczos_p016le_yuv444p16le_uv
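// p016le chroma counterpart: both components of the interleaved UV fetch are
// filtered with the shared weights and stored directly to the planes from
// param_5 and param_6, again without the bit-replication step.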
.visible .entry Subsample_Lanczos_p016le_yuv444p16le_uv(
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Lanczos_p016le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Lanczos_p016le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Lanczos_p016le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<231>;
	.reg .b64 	%rd<31>;

	ld.param.u32 	%r4, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB309_18;
	bra.uni 	$L__BB309_1;
$L__BB309_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f230, 0f3F800000;
	mov.f32 	%f223, %f230;
	@%p4 bra 	$L__BB309_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f223, %f64, %f9;
$L__BB309_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f224, %f230;
	@%p5 bra 	$L__BB309_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f224, %f69, %f13;
$L__BB309_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f225, %f230;
	@%p6 bra 	$L__BB309_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f225, %f74, %f17;
$L__BB309_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f226, %f230;
	@%p7 bra 	$L__BB309_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f226, %f79, %f21;
$L__BB309_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f227, %f230;
	@%p8 bra 	$L__BB309_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f227, %f87, %f29;
$L__BB309_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f228, %f230;
	@%p9 bra 	$L__BB309_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f228, %f92, %f33;
$L__BB309_13:
	ld.param.u64 	%rd4, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_6];
	ld.param.u64 	%rd5, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_5];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f229, %f230;
	@%p10 bra 	$L__BB309_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f229, %f97, %f37;
$L__BB309_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd6, [Subsample_Lanczos_p016le_yuv444p16le_uv_param_1];
	cvta.to.global.u64 	%rd1, %rd4;
	cvta.to.global.u64 	%rd2, %rd5;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB309_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f230, %f102, %f41;
$L__BB309_17:
	add.f32 	%f135, %f223, %f224;
	add.f32 	%f136, %f135, %f225;
	add.f32 	%f137, %f136, %f226;
	div.rn.f32 	%f138, %f226, %f137;
	div.rn.f32 	%f139, %f225, %f137;
	div.rn.f32 	%f140, %f224, %f137;
	div.rn.f32 	%f141, %f223, %f137;
	add.f32 	%f142, %f227, %f228;
	add.f32 	%f143, %f142, %f229;
	add.f32 	%f144, %f143, %f230;
	div.rn.f32 	%f145, %f227, %f144;
	div.rn.f32 	%f146, %f228, %f144;
	div.rn.f32 	%f147, %f229, %f144;
	div.rn.f32 	%f148, %f230, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd6, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r18;
	mov.b32 	%f150, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd6, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f151, %r22;
	mov.b32 	%f152, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd6, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f153, %r26;
	mov.b32 	%f154, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd6, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f155, %r30;
	mov.b32 	%f156, %r29;
	mul.f32 	%f157, %f140, %f152;
	mul.f32 	%f158, %f140, %f151;
	fma.rn.f32 	%f159, %f141, %f150, %f157;
	fma.rn.f32 	%f160, %f141, %f149, %f158;
	fma.rn.f32 	%f161, %f139, %f154, %f159;
	fma.rn.f32 	%f162, %f139, %f153, %f160;
	fma.rn.f32 	%f163, %f138, %f156, %f161;
	fma.rn.f32 	%f164, %f138, %f155, %f162;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd6, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f165, %r34;
	mov.b32 	%f166, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd6, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f167, %r38;
	mov.b32 	%f168, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd6, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f169, %r42;
	mov.b32 	%f170, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd6, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f171, %r46;
	mov.b32 	%f172, %r45;
	mul.f32 	%f173, %f140, %f168;
	mul.f32 	%f174, %f140, %f167;
	fma.rn.f32 	%f175, %f141, %f166, %f173;
	fma.rn.f32 	%f176, %f141, %f165, %f174;
	fma.rn.f32 	%f177, %f139, %f170, %f175;
	fma.rn.f32 	%f178, %f139, %f169, %f176;
	fma.rn.f32 	%f179, %f138, %f172, %f177;
	fma.rn.f32 	%f180, %f138, %f171, %f178;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd6, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f181, %r50;
	mov.b32 	%f182, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd6, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f183, %r54;
	mov.b32 	%f184, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd6, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f185, %r58;
	mov.b32 	%f186, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd6, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f187, %r62;
	mov.b32 	%f188, %r61;
	mul.f32 	%f189, %f140, %f184;
	mul.f32 	%f190, %f140, %f183;
	fma.rn.f32 	%f191, %f141, %f182, %f189;
	fma.rn.f32 	%f192, %f141, %f181, %f190;
	fma.rn.f32 	%f193, %f139, %f186, %f191;
	fma.rn.f32 	%f194, %f139, %f185, %f192;
	fma.rn.f32 	%f195, %f138, %f188, %f193;
	fma.rn.f32 	%f196, %f138, %f187, %f194;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd6, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f197, %r66;
	mov.b32 	%f198, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd6, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f199, %r70;
	mov.b32 	%f200, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd6, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f201, %r74;
	mov.b32 	%f202, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd6, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f203, %r78;
	mov.b32 	%f204, %r77;
	mul.f32 	%f205, %f140, %f200;
	mul.f32 	%f206, %f140, %f199;
	fma.rn.f32 	%f207, %f141, %f198, %f205;
	fma.rn.f32 	%f208, %f141, %f197, %f206;
	fma.rn.f32 	%f209, %f139, %f202, %f207;
	fma.rn.f32 	%f210, %f139, %f201, %f208;
	fma.rn.f32 	%f211, %f138, %f204, %f209;
	fma.rn.f32 	%f212, %f138, %f203, %f210;
	mul.f32 	%f213, %f146, %f179;
	mul.f32 	%f214, %f146, %f180;
	fma.rn.f32 	%f215, %f145, %f163, %f213;
	fma.rn.f32 	%f216, %f145, %f164, %f214;
	fma.rn.f32 	%f217, %f147, %f195, %f215;
	fma.rn.f32 	%f218, %f147, %f196, %f216;
	fma.rn.f32 	%f219, %f148, %f211, %f217;
	fma.rn.f32 	%f220, %f148, %f212, %f218;
	mul.f32 	%f221, %f219, 0f477FFF00;
	mul.f32 	%f222, %f220, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f221;
	cvt.rzi.u16.f32 	%rs2, %f222;
	cvt.s64.s32 	%rd22, %r2;
	cvt.s64.s32 	%rd23, %r5;
	shr.u64 	%rd24, %rd23, 1;
	mul.lo.s64 	%rd25, %rd24, %rd22;
	cvt.s64.s32 	%rd26, %r1;
	add.s64 	%rd27, %rd25, %rd26;
	shl.b64 	%rd28, %rd27, 1;
	add.s64 	%rd29, %rd2, %rd28;
	st.global.u16 	[%rd29], %rs1;
	add.s64 	%rd30, %rd1, %rd28;
	st.global.u16 	[%rd30], %rs2;
$L__BB309_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_yuv444p16le
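// Planar 16-bit to planar 16-bit case: filtering identical to the p016le luma
// kernel, reading a single plane through the texture in param_0 and writing
// the plane passed in param_4.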
.visible .entry Subsample_Lanczos_yuv444p16le_yuv444p16le(
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_yuv444p16le_param_13
)
{
	.reg .pred 	%p<12>;
	.reg .b16 	%rs<2>;
	.reg .b32 	%r<81>;
	.reg .f32 	%f<194>;
	.reg .b64 	%rd<28>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB310_18;
	bra.uni 	$L__BB310_1;
$L__BB310_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_11];
	cvt.rn.f32.s32 	%f45, %r6;
	cvt.rn.f32.s32 	%f46, %r3;
	div.rn.f32 	%f47, %f45, %f46;
	cvt.rn.f32.s32 	%f48, %r7;
	cvt.rn.f32.s32 	%f49, %r4;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r1;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f47, %f52, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r2;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mov.f32 	%f193, 0f3F800000;
	mov.f32 	%f186, %f193;
	@%p4 bra 	$L__BB310_3;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f186, %f64, %f9;
$L__BB310_3:
	fma.rn.f32 	%f56, %f50, %f55, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mov.f32 	%f187, %f193;
	@%p5 bra 	$L__BB310_5;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f187, %f69, %f13;
$L__BB310_5:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mov.f32 	%f188, %f193;
	@%p6 bra 	$L__BB310_7;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f188, %f74, %f17;
$L__BB310_7:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mov.f32 	%f189, %f193;
	@%p7 bra 	$L__BB310_9;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f189, %f79, %f21;
$L__BB310_9:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mov.f32 	%f190, %f193;
	@%p8 bra 	$L__BB310_11;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f190, %f87, %f29;
$L__BB310_11:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mov.f32 	%f191, %f193;
	@%p9 bra 	$L__BB310_13;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f191, %f92, %f33;
$L__BB310_13:
	ld.param.u64 	%rd3, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_4];
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mov.f32 	%f192, %f193;
	@%p10 bra 	$L__BB310_15;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f192, %f97, %f37;
$L__BB310_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_10];
	ld.param.u64 	%rd4, [Subsample_Lanczos_yuv444p16le_yuv444p16le_param_0];
	cvta.to.global.u64 	%rd1, %rd3;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	@%p11 bra 	$L__BB310_17;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f193, %f102, %f41;
$L__BB310_17:
	add.f32 	%f135, %f186, %f187;
	add.f32 	%f136, %f135, %f188;
	add.f32 	%f137, %f136, %f189;
	div.rn.f32 	%f138, %f189, %f137;
	div.rn.f32 	%f139, %f188, %f137;
	div.rn.f32 	%f140, %f187, %f137;
	div.rn.f32 	%f141, %f186, %f137;
	add.f32 	%f142, %f190, %f191;
	add.f32 	%f143, %f142, %f192;
	add.f32 	%f144, %f143, %f193;
	div.rn.f32 	%f145, %f190, %f144;
	div.rn.f32 	%f146, %f191, %f144;
	div.rn.f32 	%f147, %f192, %f144;
	div.rn.f32 	%f148, %f193, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd4, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd4, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f150, %r21;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd4, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f151, %r25;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd4, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f152, %r29;
	mul.f32 	%f153, %f140, %f150;
	fma.rn.f32 	%f154, %f141, %f149, %f153;
	fma.rn.f32 	%f155, %f139, %f151, %f154;
	fma.rn.f32 	%f156, %f138, %f152, %f155;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd4, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f157, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd4, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f158, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd4, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f159, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd4, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f160, %r45;
	mul.f32 	%f161, %f140, %f158;
	fma.rn.f32 	%f162, %f141, %f157, %f161;
	fma.rn.f32 	%f163, %f139, %f159, %f162;
	fma.rn.f32 	%f164, %f138, %f160, %f163;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd4, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f165, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd4, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f166, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd4, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f167, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd4, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f168, %r61;
	mul.f32 	%f169, %f140, %f166;
	fma.rn.f32 	%f170, %f141, %f165, %f169;
	fma.rn.f32 	%f171, %f139, %f167, %f170;
	fma.rn.f32 	%f172, %f138, %f168, %f171;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd4, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f173, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd4, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f174, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd4, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f175, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd4, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f176, %r77;
	mul.f32 	%f177, %f140, %f174;
	fma.rn.f32 	%f178, %f141, %f173, %f177;
	fma.rn.f32 	%f179, %f139, %f175, %f178;
	fma.rn.f32 	%f180, %f138, %f176, %f179;
	mul.f32 	%f181, %f146, %f164;
	fma.rn.f32 	%f182, %f145, %f156, %f181;
	fma.rn.f32 	%f183, %f147, %f172, %f182;
	fma.rn.f32 	%f184, %f148, %f180, %f183;
	mul.f32 	%f185, %f184, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f185;
	cvt.s64.s32 	%rd20, %r2;
	cvt.s64.s32 	%rd21, %r5;
	shr.u64 	%rd22, %rd21, 1;
	mul.lo.s64 	%rd23, %rd22, %rd20;
	cvt.s64.s32 	%rd24, %r1;
	add.s64 	%rd25, %rd23, %rd24;
	shl.b64 	%rd26, %rd25, 1;
	add.s64 	%rd27, %rd1, %rd26;
	st.global.u16 	[%rd27], %rs1;
$L__BB310_18:
	ret;

}
	// .globl	Subsample_Lanczos_yuv444p16le_yuv444p16le_uv
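// Chroma kernel for the planar 16-bit case: U and V live in separate planes
// here, so the kernel first filters the texture from param_1 and stores the
// result to the plane from param_5, then re-evaluates the same eight Lanczos
// weights (the sin.approx/div.rn sequences are re-issued after the first
// st.global, apparently rematerialized rather than kept live), presumably
// before sampling the second chroma texture for the other plane.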
.visible .entry Subsample_Lanczos_yuv444p16le_yuv444p16le_uv(
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_0,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_1,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_2,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_3,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_4,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_5,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_6,
	.param .u64 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_7,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_8,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_9,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_10,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_11,
	.param .u32 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_12,
	.param .f32 Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_13
)
{
	.reg .pred 	%p<20>;
	.reg .b16 	%rs<3>;
	.reg .b32 	%r<145>;
	.reg .f32 	%f<339>;
	.reg .b64 	%rd<49>;

	ld.param.u32 	%r4, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB311_34;
	bra.uni 	$L__BB311_1;
$L__BB311_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_11];
	cvt.rn.f32.s32 	%f67, %r6;
	cvt.rn.f32.s32 	%f68, %r3;
	div.rn.f32 	%f69, %f67, %f68;
	cvt.rn.f32.s32 	%f70, %r7;
	cvt.rn.f32.s32 	%f71, %r4;
	div.rn.f32 	%f72, %f70, %f71;
	cvt.rn.f32.s32 	%f73, %r1;
	add.f32 	%f74, %f73, 0f3F000000;
	fma.rn.f32 	%f75, %f69, %f74, 0fBF000000;
	cvt.rn.f32.s32 	%f76, %r2;
	add.f32 	%f77, %f76, 0f3F000000;
	cvt.rmi.f32.f32 	%f242, %f75;
	sub.f32 	%f79, %f75, %f242;
	add.f32 	%f80, %f79, 0f3F800000;
	mul.f32 	%f4, %f80, 0f40490FDB;
	mul.f32 	%f5, %f79, 0f40490FDB;
	add.f32 	%f81, %f79, 0fBF800000;
	setp.eq.f32 	%p4, %f4, 0f00000000;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f83, %f4, %f4;
	mul.f32 	%f9, %f83, 0f3F000000;
	mov.f32 	%f338, 0f3F800000;
	mov.f32 	%f323, %f338;
	@%p4 bra 	$L__BB311_3;
	sin.approx.f32 	%f84, %f4;
	sin.approx.f32 	%f85, %f8;
	mul.f32 	%f86, %f84, %f85;
	div.rn.f32 	%f323, %f86, %f9;
$L__BB311_3:
	fma.rn.f32 	%f78, %f72, %f77, 0fBF000000;
	add.f32 	%f82, %f79, 0fC0000000;
	mul.f32 	%f6, %f81, 0f40490FDB;
	setp.eq.f32 	%p5, %f5, 0f00000000;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f88, %f5, %f5;
	mul.f32 	%f13, %f88, 0f3F000000;
	mov.f32 	%f324, %f338;
	@%p5 bra 	$L__BB311_5;
	sin.approx.f32 	%f89, %f5;
	sin.approx.f32 	%f90, %f12;
	mul.f32 	%f91, %f89, %f90;
	div.rn.f32 	%f324, %f91, %f13;
$L__BB311_5:
	cvt.rmi.f32.f32 	%f249, %f78;
	mul.f32 	%f7, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f6, 0f00000000;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f93, %f6, %f6;
	mul.f32 	%f17, %f93, 0f3F000000;
	mov.f32 	%f325, %f338;
	@%p6 bra 	$L__BB311_7;
	sin.approx.f32 	%f94, %f6;
	sin.approx.f32 	%f95, %f16;
	mul.f32 	%f96, %f94, %f95;
	div.rn.f32 	%f325, %f96, %f17;
$L__BB311_7:
	sub.f32 	%f3, %f78, %f249;
	setp.eq.f32 	%p7, %f7, 0f00000000;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f98, %f7, %f7;
	mul.f32 	%f21, %f98, 0f3F000000;
	mov.f32 	%f326, %f338;
	@%p7 bra 	$L__BB311_9;
	sin.approx.f32 	%f99, %f7;
	sin.approx.f32 	%f100, %f20;
	mul.f32 	%f101, %f99, %f100;
	div.rn.f32 	%f326, %f101, %f21;
$L__BB311_9:
	add.f32 	%f103, %f3, 0f3F800000;
	mul.f32 	%f24, %f103, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f104, %f3, 0fBF800000;
	setp.eq.f32 	%p8, %f24, 0f00000000;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f106, %f24, %f24;
	mul.f32 	%f29, %f106, 0f3F000000;
	mov.f32 	%f327, %f338;
	@%p8 bra 	$L__BB311_11;
	sin.approx.f32 	%f107, %f24;
	sin.approx.f32 	%f108, %f28;
	mul.f32 	%f109, %f107, %f108;
	div.rn.f32 	%f327, %f109, %f29;
$L__BB311_11:
	add.f32 	%f105, %f3, 0fC0000000;
	mul.f32 	%f26, %f104, 0f40490FDB;
	setp.eq.f32 	%p9, %f25, 0f00000000;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f111, %f25, %f25;
	mul.f32 	%f33, %f111, 0f3F000000;
	mov.f32 	%f328, %f338;
	@%p9 bra 	$L__BB311_13;
	sin.approx.f32 	%f112, %f25;
	sin.approx.f32 	%f113, %f32;
	mul.f32 	%f114, %f112, %f113;
	div.rn.f32 	%f328, %f114, %f33;
$L__BB311_13:
	ld.param.u64 	%rd7, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_5];
	mul.f32 	%f27, %f105, 0f40490FDB;
	setp.eq.f32 	%p10, %f26, 0f00000000;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f116, %f26, %f26;
	mul.f32 	%f37, %f116, 0f3F000000;
	mov.f32 	%f329, %f338;
	@%p10 bra 	$L__BB311_15;
	sin.approx.f32 	%f117, %f26;
	sin.approx.f32 	%f118, %f36;
	mul.f32 	%f119, %f117, %f118;
	div.rn.f32 	%f329, %f119, %f37;
$L__BB311_15:
	ld.param.u32 	%r5, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_10];
	ld.param.u64 	%rd8, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_1];
	cvta.to.global.u64 	%rd2, %rd7;
	setp.eq.f32 	%p11, %f27, 0f00000000;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f121, %f27, %f27;
	mul.f32 	%f41, %f121, 0f3F000000;
	mov.f32 	%f330, %f338;
	@%p11 bra 	$L__BB311_17;
	sin.approx.f32 	%f122, %f27;
	sin.approx.f32 	%f123, %f40;
	mul.f32 	%f124, %f122, %f123;
	div.rn.f32 	%f330, %f124, %f41;
$L__BB311_17:
	add.f32 	%f158, %f323, %f324;
	add.f32 	%f159, %f158, %f325;
	add.f32 	%f160, %f159, %f326;
	div.rn.f32 	%f161, %f326, %f160;
	div.rn.f32 	%f162, %f325, %f160;
	div.rn.f32 	%f163, %f324, %f160;
	div.rn.f32 	%f164, %f323, %f160;
	add.f32 	%f165, %f327, %f328;
	add.f32 	%f166, %f165, %f329;
	add.f32 	%f167, %f166, %f330;
	div.rn.f32 	%f168, %f327, %f167;
	div.rn.f32 	%f169, %f328, %f167;
	div.rn.f32 	%f170, %f329, %f167;
	div.rn.f32 	%f171, %f330, %f167;
	add.f32 	%f240, %f242, 0fBF800000;
	add.f32 	%f241, %f249, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r17, %r18, %r19, %r20}, [%rd8, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f172, %r17;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r21, %r22, %r23, %r24}, [%rd8, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f173, %r21;
	add.f32 	%f244, %f242, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r25, %r26, %r27, %r28}, [%rd8, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f174, %r25;
	add.f32 	%f246, %f242, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r29, %r30, %r31, %r32}, [%rd8, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f175, %r29;
	mul.f32 	%f176, %f163, %f173;
	fma.rn.f32 	%f177, %f164, %f172, %f176;
	fma.rn.f32 	%f178, %f162, %f174, %f177;
	fma.rn.f32 	%f179, %f161, %f175, %f178;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r33, %r34, %r35, %r36}, [%rd8, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f180, %r33;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r37, %r38, %r39, %r40}, [%rd8, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f181, %r37;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r41, %r42, %r43, %r44}, [%rd8, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f182, %r41;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r45, %r46, %r47, %r48}, [%rd8, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f183, %r45;
	mul.f32 	%f184, %f163, %f181;
	fma.rn.f32 	%f185, %f164, %f180, %f184;
	fma.rn.f32 	%f186, %f162, %f182, %f185;
	fma.rn.f32 	%f187, %f161, %f183, %f186;
	add.f32 	%f257, %f249, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r49, %r50, %r51, %r52}, [%rd8, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f188, %r49;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r53, %r54, %r55, %r56}, [%rd8, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f189, %r53;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r57, %r58, %r59, %r60}, [%rd8, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f190, %r57;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r61, %r62, %r63, %r64}, [%rd8, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f191, %r61;
	mul.f32 	%f192, %f163, %f189;
	fma.rn.f32 	%f193, %f164, %f188, %f192;
	fma.rn.f32 	%f194, %f162, %f190, %f193;
	fma.rn.f32 	%f195, %f161, %f191, %f194;
	add.f32 	%f265, %f249, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r65, %r66, %r67, %r68}, [%rd8, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f196, %r65;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r69, %r70, %r71, %r72}, [%rd8, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f197, %r69;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r73, %r74, %r75, %r76}, [%rd8, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f198, %r73;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r77, %r78, %r79, %r80}, [%rd8, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f199, %r77;
	mul.f32 	%f200, %f163, %f197;
	fma.rn.f32 	%f201, %f164, %f196, %f200;
	fma.rn.f32 	%f202, %f162, %f198, %f201;
	fma.rn.f32 	%f203, %f161, %f199, %f202;
	mul.f32 	%f204, %f169, %f187;
	fma.rn.f32 	%f205, %f168, %f179, %f204;
	fma.rn.f32 	%f206, %f170, %f195, %f205;
	fma.rn.f32 	%f207, %f171, %f203, %f206;
	mul.f32 	%f208, %f207, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs1, %f208;
	cvt.s64.s32 	%rd24, %r2;
	cvt.s64.s32 	%rd25, %r5;
	shr.u64 	%rd26, %rd25, 1;
	mul.lo.s64 	%rd27, %rd26, %rd24;
	cvt.s64.s32 	%rd28, %r1;
	add.s64 	%rd3, %rd27, %rd28;
	shl.b64 	%rd29, %rd3, 1;
	add.s64 	%rd30, %rd2, %rd29;
	st.global.u16 	[%rd30], %rs1;
	mov.f32 	%f331, %f338;
	@%p4 bra 	$L__BB311_19;
	sin.approx.f32 	%f209, %f4;
	sin.approx.f32 	%f210, %f8;
	mul.f32 	%f211, %f209, %f210;
	div.rn.f32 	%f331, %f211, %f9;
$L__BB311_19:
	mov.f32 	%f332, %f338;
	@%p5 bra 	$L__BB311_21;
	sin.approx.f32 	%f213, %f5;
	sin.approx.f32 	%f214, %f12;
	mul.f32 	%f215, %f213, %f214;
	div.rn.f32 	%f332, %f215, %f13;
$L__BB311_21:
	mov.f32 	%f333, %f338;
	@%p6 bra 	$L__BB311_23;
	sin.approx.f32 	%f217, %f6;
	sin.approx.f32 	%f218, %f16;
	mul.f32 	%f219, %f217, %f218;
	div.rn.f32 	%f333, %f219, %f17;
$L__BB311_23:
	mov.f32 	%f334, %f338;
	@%p7 bra 	$L__BB311_25;
	sin.approx.f32 	%f221, %f7;
	sin.approx.f32 	%f222, %f20;
	mul.f32 	%f223, %f221, %f222;
	div.rn.f32 	%f334, %f223, %f21;
$L__BB311_25:
	mov.f32 	%f335, %f338;
	@%p8 bra 	$L__BB311_27;
	sin.approx.f32 	%f225, %f24;
	sin.approx.f32 	%f226, %f28;
	mul.f32 	%f227, %f225, %f226;
	div.rn.f32 	%f335, %f227, %f29;
$L__BB311_27:
	mov.f32 	%f336, %f338;
	@%p9 bra 	$L__BB311_29;
	sin.approx.f32 	%f229, %f25;
	sin.approx.f32 	%f230, %f32;
	mul.f32 	%f231, %f229, %f230;
	div.rn.f32 	%f336, %f231, %f33;
$L__BB311_29:
	ld.param.u64 	%rd6, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_6];
	mov.f32 	%f337, %f338;
	@%p10 bra 	$L__BB311_31;
	sin.approx.f32 	%f233, %f26;
	sin.approx.f32 	%f234, %f36;
	mul.f32 	%f235, %f233, %f234;
	div.rn.f32 	%f337, %f235, %f37;
$L__BB311_31:
	ld.param.u64 	%rd31, [Subsample_Lanczos_yuv444p16le_yuv444p16le_uv_param_2];
	cvta.to.global.u64 	%rd1, %rd6;
	@%p11 bra 	$L__BB311_33;
	sin.approx.f32 	%f237, %f27;
	sin.approx.f32 	%f238, %f40;
	mul.f32 	%f239, %f237, %f238;
	div.rn.f32 	%f338, %f239, %f41;
$L__BB311_33:
	add.f32 	%f272, %f331, %f332;
	add.f32 	%f273, %f272, %f333;
	add.f32 	%f274, %f273, %f334;
	div.rn.f32 	%f275, %f334, %f274;
	div.rn.f32 	%f276, %f333, %f274;
	div.rn.f32 	%f277, %f332, %f274;
	div.rn.f32 	%f278, %f331, %f274;
	add.f32 	%f279, %f335, %f336;
	add.f32 	%f280, %f279, %f337;
	add.f32 	%f281, %f280, %f338;
	div.rn.f32 	%f282, %f335, %f281;
	div.rn.f32 	%f283, %f336, %f281;
	div.rn.f32 	%f284, %f337, %f281;
	div.rn.f32 	%f285, %f338, %f281;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r81, %r82, %r83, %r84}, [%rd31, {%f240, %f241}];
	// end inline asm
	mov.b32 	%f286, %r81;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r85, %r86, %r87, %r88}, [%rd31, {%f242, %f241}];
	// end inline asm
	mov.b32 	%f287, %r85;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r89, %r90, %r91, %r92}, [%rd31, {%f244, %f241}];
	// end inline asm
	mov.b32 	%f288, %r89;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r93, %r94, %r95, %r96}, [%rd31, {%f246, %f241}];
	// end inline asm
	mov.b32 	%f289, %r93;
	mul.f32 	%f290, %f277, %f287;
	fma.rn.f32 	%f291, %f278, %f286, %f290;
	fma.rn.f32 	%f292, %f276, %f288, %f291;
	fma.rn.f32 	%f293, %f275, %f289, %f292;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r97, %r98, %r99, %r100}, [%rd31, {%f240, %f249}];
	// end inline asm
	mov.b32 	%f294, %r97;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r101, %r102, %r103, %r104}, [%rd31, {%f242, %f249}];
	// end inline asm
	mov.b32 	%f295, %r101;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r105, %r106, %r107, %r108}, [%rd31, {%f244, %f249}];
	// end inline asm
	mov.b32 	%f296, %r105;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r109, %r110, %r111, %r112}, [%rd31, {%f246, %f249}];
	// end inline asm
	mov.b32 	%f297, %r109;
	mul.f32 	%f298, %f277, %f295;
	fma.rn.f32 	%f299, %f278, %f294, %f298;
	fma.rn.f32 	%f300, %f276, %f296, %f299;
	fma.rn.f32 	%f301, %f275, %f297, %f300;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r113, %r114, %r115, %r116}, [%rd31, {%f240, %f257}];
	// end inline asm
	mov.b32 	%f302, %r113;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r117, %r118, %r119, %r120}, [%rd31, {%f242, %f257}];
	// end inline asm
	mov.b32 	%f303, %r117;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r121, %r122, %r123, %r124}, [%rd31, {%f244, %f257}];
	// end inline asm
	mov.b32 	%f304, %r121;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r125, %r126, %r127, %r128}, [%rd31, {%f246, %f257}];
	// end inline asm
	mov.b32 	%f305, %r125;
	mul.f32 	%f306, %f277, %f303;
	fma.rn.f32 	%f307, %f278, %f302, %f306;
	fma.rn.f32 	%f308, %f276, %f304, %f307;
	fma.rn.f32 	%f309, %f275, %f305, %f308;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r129, %r130, %r131, %r132}, [%rd31, {%f240, %f265}];
	// end inline asm
	mov.b32 	%f310, %r129;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r133, %r134, %r135, %r136}, [%rd31, {%f242, %f265}];
	// end inline asm
	mov.b32 	%f311, %r133;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r137, %r138, %r139, %r140}, [%rd31, {%f244, %f265}];
	// end inline asm
	mov.b32 	%f312, %r137;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r141, %r142, %r143, %r144}, [%rd31, {%f246, %f265}];
	// end inline asm
	mov.b32 	%f313, %r141;
	mul.f32 	%f314, %f277, %f311;
	fma.rn.f32 	%f315, %f278, %f310, %f314;
	fma.rn.f32 	%f316, %f276, %f312, %f315;
	fma.rn.f32 	%f317, %f275, %f313, %f316;
	mul.f32 	%f318, %f283, %f301;
	fma.rn.f32 	%f319, %f282, %f293, %f318;
	fma.rn.f32 	%f320, %f284, %f309, %f319;
	fma.rn.f32 	%f321, %f285, %f317, %f320;
	mul.f32 	%f322, %f321, 0f477FFF00;
	cvt.rzi.u16.f32 	%rs2, %f322;
	add.s64 	%rd48, %rd1, %rd29;
	st.global.u16 	[%rd48], %rs2;
$L__BB311_34:
	ret;

}
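//
// The epilogue above (Subsample_Lanczos_yuv444p16le_yuv444p16le_uv) repeats one
// pattern per weight: when the tap angle is non-zero it computes
// sin(a) * sin(a/2) / (0.5 * a*a) with sin.approx.f32, otherwise it keeps a
// previously initialised default (apparently 1.0, set earlier in the kernel).
// The horizontal and vertical weight groups are each normalised by their sum, a
// 4x4 texel neighbourhood is fetched per chroma plane with tex.2d.v4.f32.f32,
// blended with fma.rn.f32, scaled by 0f477FFF00 (65535.0), truncated via
// cvt.rzi.u16.f32 and stored as one u16 sample per destination plane at byte
// offset 2*(y*(param_10/2) + x), param_10 likely being the destination pitch in
// bytes.
//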
	// .globl	Subsample_Lanczos_bgr0_bgr0
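//
// Subsample_Lanczos_bgr0_bgr0 (and the packed-RGB variants that follow) share
// one shape: derive the destination pixel (x, y) from %ctaid/%ntid/%tid, return
// early unless x < param_8 and y < param_9 (presumably the destination width
// and height), then call the device function
// _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif
// (which demangles to roughly Subsample_Bicubic<uchar4, &lanczos_coeffs>) and
// store the returned uchar4 at param_4 + 4*(y*(param_10 >> 2) + x), i.e.
// param_10 appears to be the destination pitch in bytes.
//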
.visible .entry Subsample_Lanczos_bgr0_bgr0(
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_0,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_1,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_2,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_3,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_4,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_5,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_6,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_param_7,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_param_8,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_param_9,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_param_10,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_param_11,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_param_12,
	.param .f32 Subsample_Lanczos_bgr0_bgr0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<17>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Lanczos_bgr0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_bgr0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB312_2;
	bra.uni 	$L__BB312_1;
$L__BB312_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_bgr0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_bgr0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Lanczos_bgr0_bgr0_param_10];
	ld.param.u64 	%rd2, [Subsample_Lanczos_bgr0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Lanczos_bgr0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	{ // callseq 0, 0
	.reg .b32 temp_param_reg;
	.param .b64 param0;
	st.param.b64 	[param0+0], %rd2;
	.param .b32 param1;
	st.param.b32 	[param1+0], %r1;
	.param .b32 param2;
	st.param.b32 	[param2+0], %r2;
	.param .b32 param3;
	st.param.b32 	[param3+0], %r3;
	.param .b32 param4;
	st.param.b32 	[param4+0], %r4;
	.param .b32 param5;
	st.param.b32 	[param5+0], %r6;
	.param .b32 param6;
	st.param.b32 	[param6+0], %r7;
	.param .align 16 .b8 retval0[4];
	call.uni (retval0), 
	_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif, 
	(
	param0, 
	param1, 
	param2, 
	param3, 
	param4, 
	param5, 
	param6
	);
	ld.param.v4.b8 	{%rs1, %rs2, %rs3, %rs4}, [retval0+0];
	} // callseq 0
	cvt.s64.s32 	%rd4, %r2;
	cvt.s64.s32 	%rd5, %r5;
	shr.u64 	%rd6, %rd5, 2;
	mul.lo.s64 	%rd7, %rd6, %rd4;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	shl.b64 	%rd10, %rd9, 2;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.v4.u8 	[%rd11], {%rs1, %rs2, %rs3, %rs4};
$L__BB312_2:
	ret;

}
	// .globl	Subsample_Lanczos_bgr0_bgr0_uv
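//
// The *_uv entry points for the packed RGB formats (this one and the
// rgb0_rgb0/bgr0_rgb0/rgb0_bgr0 variants below) only declare registers and
// return immediately: packed RGB output has no separate chroma plane, so there
// appears to be nothing for the UV pass to do.
//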
.visible .entry Subsample_Lanczos_bgr0_bgr0_uv(
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_0,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_1,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_2,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_3,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_4,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_5,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_6,
	.param .u64 Subsample_Lanczos_bgr0_bgr0_uv_param_7,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_uv_param_8,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_uv_param_9,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_uv_param_10,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_uv_param_11,
	.param .u32 Subsample_Lanczos_bgr0_bgr0_uv_param_12,
	.param .f32 Subsample_Lanczos_bgr0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Lanczos_rgb0_rgb0
.visible .entry Subsample_Lanczos_rgb0_rgb0(
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_0,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_1,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_2,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_3,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_4,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_5,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_6,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_param_7,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_param_8,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_param_9,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_param_10,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_param_11,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_param_12,
	.param .f32 Subsample_Lanczos_rgb0_rgb0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<17>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Lanczos_rgb0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_rgb0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB314_2;
	bra.uni 	$L__BB314_1;
$L__BB314_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_rgb0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_rgb0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Lanczos_rgb0_rgb0_param_10];
	ld.param.u64 	%rd2, [Subsample_Lanczos_rgb0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Lanczos_rgb0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	{ // callseq 1, 0
	.reg .b32 temp_param_reg;
	.param .b64 param0;
	st.param.b64 	[param0+0], %rd2;
	.param .b32 param1;
	st.param.b32 	[param1+0], %r1;
	.param .b32 param2;
	st.param.b32 	[param2+0], %r2;
	.param .b32 param3;
	st.param.b32 	[param3+0], %r3;
	.param .b32 param4;
	st.param.b32 	[param4+0], %r4;
	.param .b32 param5;
	st.param.b32 	[param5+0], %r6;
	.param .b32 param6;
	st.param.b32 	[param6+0], %r7;
	.param .align 16 .b8 retval0[4];
	call.uni (retval0), 
	_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif, 
	(
	param0, 
	param1, 
	param2, 
	param3, 
	param4, 
	param5, 
	param6
	);
	ld.param.v4.b8 	{%rs1, %rs2, %rs3, %rs4}, [retval0+0];
	} // callseq 1
	cvt.s64.s32 	%rd4, %r2;
	cvt.s64.s32 	%rd5, %r5;
	shr.u64 	%rd6, %rd5, 2;
	mul.lo.s64 	%rd7, %rd6, %rd4;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	shl.b64 	%rd10, %rd9, 2;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.v4.u8 	[%rd11], {%rs1, %rs2, %rs3, %rs4};
$L__BB314_2:
	ret;

}
	// .globl	Subsample_Lanczos_rgb0_rgb0_uv
.visible .entry Subsample_Lanczos_rgb0_rgb0_uv(
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_0,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_1,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_2,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_3,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_4,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_5,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_6,
	.param .u64 Subsample_Lanczos_rgb0_rgb0_uv_param_7,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_uv_param_8,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_uv_param_9,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_uv_param_10,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_uv_param_11,
	.param .u32 Subsample_Lanczos_rgb0_rgb0_uv_param_12,
	.param .f32 Subsample_Lanczos_rgb0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Lanczos_bgr0_rgb0
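//
// Subsample_Lanczos_bgr0_rgb0 is structurally identical to
// Subsample_Lanczos_bgr0_bgr0 above except for the final store, which writes
// {%rs3, %rs2, %rs1, %rs4} instead of {%rs1, %rs2, %rs3, %rs4} and so swaps the
// first and third channel of every pixel; Subsample_Lanczos_rgb0_bgr0 further
// below uses the same swizzle for the opposite conversion.
//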
.visible .entry Subsample_Lanczos_bgr0_rgb0(
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_0,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_1,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_2,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_3,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_4,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_5,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_6,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_param_7,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_param_8,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_param_9,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_param_10,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_param_11,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_param_12,
	.param .f32 Subsample_Lanczos_bgr0_rgb0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<17>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Lanczos_bgr0_rgb0_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_bgr0_rgb0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB316_2;
	bra.uni 	$L__BB316_1;
$L__BB316_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_bgr0_rgb0_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_bgr0_rgb0_param_11];
	ld.param.u32 	%r5, [Subsample_Lanczos_bgr0_rgb0_param_10];
	ld.param.u64 	%rd2, [Subsample_Lanczos_bgr0_rgb0_param_0];
	ld.param.u64 	%rd3, [Subsample_Lanczos_bgr0_rgb0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	{ // callseq 2, 0
	.reg .b32 temp_param_reg;
	.param .b64 param0;
	st.param.b64 	[param0+0], %rd2;
	.param .b32 param1;
	st.param.b32 	[param1+0], %r1;
	.param .b32 param2;
	st.param.b32 	[param2+0], %r2;
	.param .b32 param3;
	st.param.b32 	[param3+0], %r3;
	.param .b32 param4;
	st.param.b32 	[param4+0], %r4;
	.param .b32 param5;
	st.param.b32 	[param5+0], %r6;
	.param .b32 param6;
	st.param.b32 	[param6+0], %r7;
	.param .align 16 .b8 retval0[4];
	call.uni (retval0), 
	_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif, 
	(
	param0, 
	param1, 
	param2, 
	param3, 
	param4, 
	param5, 
	param6
	);
	ld.param.v4.b8 	{%rs1, %rs2, %rs3, %rs4}, [retval0+0];
	} // callseq 2
	cvt.s64.s32 	%rd4, %r2;
	cvt.s64.s32 	%rd5, %r5;
	shr.u64 	%rd6, %rd5, 2;
	mul.lo.s64 	%rd7, %rd6, %rd4;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	shl.b64 	%rd10, %rd9, 2;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.v4.u8 	[%rd11], {%rs3, %rs2, %rs1, %rs4};
$L__BB316_2:
	ret;

}
	// .globl	Subsample_Lanczos_bgr0_rgb0_uv
.visible .entry Subsample_Lanczos_bgr0_rgb0_uv(
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_0,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_1,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_2,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_3,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_4,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_5,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_6,
	.param .u64 Subsample_Lanczos_bgr0_rgb0_uv_param_7,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_uv_param_8,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_uv_param_9,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_uv_param_10,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_uv_param_11,
	.param .u32 Subsample_Lanczos_bgr0_rgb0_uv_param_12,
	.param .f32 Subsample_Lanczos_bgr0_rgb0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
	// .globl	Subsample_Lanczos_rgb0_bgr0
.visible .entry Subsample_Lanczos_rgb0_bgr0(
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_0,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_1,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_2,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_3,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_4,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_5,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_6,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_param_7,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_param_8,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_param_9,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_param_10,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_param_11,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_param_12,
	.param .f32 Subsample_Lanczos_rgb0_bgr0_param_13
)
{
	.reg .pred 	%p<4>;
	.reg .b16 	%rs<9>;
	.reg .b32 	%r<17>;
	.reg .b64 	%rd<12>;

	ld.param.u32 	%r4, [Subsample_Lanczos_rgb0_bgr0_param_9];
	ld.param.u32 	%r3, [Subsample_Lanczos_rgb0_bgr0_param_8];
	// begin inline asm
	mov.u32 %r8, %ctaid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r9, %ctaid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r11, %ntid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r12, %ntid.y;
	// end inline asm
	// begin inline asm
	mov.u32 %r14, %tid.x;
	// end inline asm
	// begin inline asm
	mov.u32 %r15, %tid.y;
	// end inline asm
	mad.lo.s32 	%r1, %r11, %r8, %r14;
	mad.lo.s32 	%r2, %r12, %r9, %r15;
	setp.lt.s32 	%p1, %r2, %r4;
	setp.lt.s32 	%p2, %r1, %r3;
	and.pred  	%p3, %p1, %p2;
	@!%p3 bra 	$L__BB318_2;
	bra.uni 	$L__BB318_1;
$L__BB318_1:
	ld.param.u32 	%r7, [Subsample_Lanczos_rgb0_bgr0_param_12];
	ld.param.u32 	%r6, [Subsample_Lanczos_rgb0_bgr0_param_11];
	ld.param.u32 	%r5, [Subsample_Lanczos_rgb0_bgr0_param_10];
	ld.param.u64 	%rd2, [Subsample_Lanczos_rgb0_bgr0_param_0];
	ld.param.u64 	%rd3, [Subsample_Lanczos_rgb0_bgr0_param_4];
	cvta.to.global.u64 	%rd1, %rd3;
	{ // callseq 3, 0
	.reg .b32 temp_param_reg;
	.param .b64 param0;
	st.param.b64 	[param0+0], %rd2;
	.param .b32 param1;
	st.param.b32 	[param1+0], %r1;
	.param .b32 param2;
	st.param.b32 	[param2+0], %r2;
	.param .b32 param3;
	st.param.b32 	[param3+0], %r3;
	.param .b32 param4;
	st.param.b32 	[param4+0], %r4;
	.param .b32 param5;
	st.param.b32 	[param5+0], %r6;
	.param .b32 param6;
	st.param.b32 	[param6+0], %r7;
	.param .align 16 .b8 retval0[4];
	call.uni (retval0), 
	_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif, 
	(
	param0, 
	param1, 
	param2, 
	param3, 
	param4, 
	param5, 
	param6
	);
	ld.param.v4.b8 	{%rs1, %rs2, %rs3, %rs4}, [retval0+0];
	} // callseq 3
	cvt.s64.s32 	%rd4, %r2;
	cvt.s64.s32 	%rd5, %r5;
	shr.u64 	%rd6, %rd5, 2;
	mul.lo.s64 	%rd7, %rd6, %rd4;
	cvt.s64.s32 	%rd8, %r1;
	add.s64 	%rd9, %rd7, %rd8;
	shl.b64 	%rd10, %rd9, 2;
	add.s64 	%rd11, %rd1, %rd10;
	st.global.v4.u8 	[%rd11], {%rs3, %rs2, %rs1, %rs4};
$L__BB318_2:
	ret;

}
	// .globl	Subsample_Lanczos_rgb0_bgr0_uv
.visible .entry Subsample_Lanczos_rgb0_bgr0_uv(
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_0,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_1,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_2,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_3,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_4,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_5,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_6,
	.param .u64 Subsample_Lanczos_rgb0_bgr0_uv_param_7,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_uv_param_8,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_uv_param_9,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_uv_param_10,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_uv_param_11,
	.param .u32 Subsample_Lanczos_rgb0_bgr0_uv_param_12,
	.param .f32 Subsample_Lanczos_rgb0_bgr0_uv_param_13
)
{
	.reg .b32 	%r<10>;

	ret;

}
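//
// _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif below
// (roughly Subsample_Bicubic<uchar4, &lanczos_coeffs>) is the shared sampler
// called by the packed-RGB kernels above. It maps the destination pixel
// (param_1, param_2) into the source texture with the ratios param_5/param_3
// and param_6/param_4 and the usual (i + 0.5)*scale - 0.5 centring, evaluates a
// Lanczos-2 window weight for each of the four tap offsets per axis
// (frac+1, frac, frac-1, frac-2, scaled by pi = 0f40490FDB), normalises the two
// weight groups, gathers a 4x4 neighbourhood with tex.2d.v4.f32.f32, blends all
// four channels with fma.rn.f32, scales by 0f437F0000 (255.0) and returns the
// truncated result as a packed uchar4 in func_retval0.
//
// For a non-zero angle a the weight is sin(a)*sin(a/2)/(0.5*a*a), i.e.
// sinc(x)*sinc(x/2) with a = pi*x, and it defaults to 1.0 when a == 0. A rough
// CUDA-C sketch of that step (illustrative only; the variable names are
// assumptions, not taken from the original source):
//
//     float a = x * 3.14159265f;                       // pi * tap offset
//     float w = (a == 0.f) ? 1.f
//                          : __sinf(a) * __sinf(0.5f * a) / (0.5f * a * a);
//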
.func  (.param .align 16 .b8 func_retval0[4]) _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif(
	.param .b64 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_0,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_1,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_2,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_3,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_4,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_5,
	.param .b32 _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_6
)
{
	.reg .pred 	%p<9>;
	.reg .b16 	%rs<5>;
	.reg .b32 	%r<71>;
	.reg .f32 	%f<305>;
	.reg .b64 	%rd<18>;

	ld.param.u32 	%r1, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_5];
	cvt.rn.f32.s32 	%f45, %r1;
	ld.param.u32 	%r2, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_1];
	ld.param.u32 	%r3, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_3];
	cvt.rn.f32.s32 	%f46, %r3;
	ld.param.u32 	%r4, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_2];
	div.rn.f32 	%f47, %f45, %f46;
	ld.param.u32 	%r5, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_6];
	cvt.rn.f32.s32 	%f48, %r5;
	ld.param.u32 	%r6, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_4];
	cvt.rn.f32.s32 	%f49, %r6;
	div.rn.f32 	%f50, %f48, %f49;
	cvt.rn.f32.s32 	%f51, %r2;
	add.f32 	%f52, %f51, 0f3F000000;
	fma.rn.f32 	%f53, %f52, %f47, 0fBF000000;
	cvt.rn.f32.s32 	%f54, %r4;
	add.f32 	%f55, %f54, 0f3F000000;
	cvt.rmi.f32.f32 	%f105, %f53;
	sub.f32 	%f57, %f53, %f105;
	add.f32 	%f58, %f57, 0f3F800000;
	mul.f32 	%f4, %f58, 0f40490FDB;
	mul.f32 	%f5, %f57, 0f40490FDB;
	add.f32 	%f59, %f57, 0fBF800000;
	setp.eq.f32 	%p1, %f4, 0f00000000;
	mov.f32 	%f304, 0f3F800000;
	mov.f32 	%f297, %f304;
	@%p1 bra 	$L__BB320_2;
	mul.f32 	%f8, %f4, 0f3F000000;
	mul.f32 	%f61, %f4, %f4;
	mul.f32 	%f9, %f61, 0f3F000000;
	sin.approx.f32 	%f62, %f4;
	sin.approx.f32 	%f63, %f8;
	mul.f32 	%f64, %f62, %f63;
	div.rn.f32 	%f297, %f64, %f9;
$L__BB320_2:
	fma.rn.f32 	%f56, %f55, %f50, 0fBF000000;
	add.f32 	%f60, %f57, 0fC0000000;
	mul.f32 	%f6, %f59, 0f40490FDB;
	setp.eq.f32 	%p2, %f5, 0f00000000;
	mov.f32 	%f298, %f304;
	@%p2 bra 	$L__BB320_4;
	mul.f32 	%f12, %f5, 0f3F000000;
	mul.f32 	%f66, %f5, %f5;
	mul.f32 	%f13, %f66, 0f3F000000;
	sin.approx.f32 	%f67, %f5;
	sin.approx.f32 	%f68, %f12;
	mul.f32 	%f69, %f67, %f68;
	div.rn.f32 	%f298, %f69, %f13;
$L__BB320_4:
	cvt.rmi.f32.f32 	%f112, %f56;
	mul.f32 	%f7, %f60, 0f40490FDB;
	setp.eq.f32 	%p3, %f6, 0f00000000;
	mov.f32 	%f299, %f304;
	@%p3 bra 	$L__BB320_6;
	mul.f32 	%f16, %f6, 0f3F000000;
	mul.f32 	%f71, %f6, %f6;
	mul.f32 	%f17, %f71, 0f3F000000;
	sin.approx.f32 	%f72, %f6;
	sin.approx.f32 	%f73, %f16;
	mul.f32 	%f74, %f72, %f73;
	div.rn.f32 	%f299, %f74, %f17;
$L__BB320_6:
	sub.f32 	%f3, %f56, %f112;
	setp.eq.f32 	%p4, %f7, 0f00000000;
	mov.f32 	%f300, %f304;
	@%p4 bra 	$L__BB320_8;
	mul.f32 	%f20, %f7, 0f3F000000;
	mul.f32 	%f76, %f7, %f7;
	mul.f32 	%f21, %f76, 0f3F000000;
	sin.approx.f32 	%f77, %f7;
	sin.approx.f32 	%f78, %f20;
	mul.f32 	%f79, %f77, %f78;
	div.rn.f32 	%f300, %f79, %f21;
$L__BB320_8:
	add.f32 	%f81, %f3, 0f3F800000;
	mul.f32 	%f24, %f81, 0f40490FDB;
	mul.f32 	%f25, %f3, 0f40490FDB;
	add.f32 	%f82, %f3, 0fBF800000;
	setp.eq.f32 	%p5, %f24, 0f00000000;
	mov.f32 	%f301, %f304;
	@%p5 bra 	$L__BB320_10;
	mul.f32 	%f28, %f24, 0f3F000000;
	mul.f32 	%f84, %f24, %f24;
	mul.f32 	%f29, %f84, 0f3F000000;
	sin.approx.f32 	%f85, %f24;
	sin.approx.f32 	%f86, %f28;
	mul.f32 	%f87, %f85, %f86;
	div.rn.f32 	%f301, %f87, %f29;
$L__BB320_10:
	add.f32 	%f83, %f3, 0fC0000000;
	mul.f32 	%f26, %f82, 0f40490FDB;
	setp.eq.f32 	%p6, %f25, 0f00000000;
	mov.f32 	%f302, %f304;
	@%p6 bra 	$L__BB320_12;
	mul.f32 	%f32, %f25, 0f3F000000;
	mul.f32 	%f89, %f25, %f25;
	mul.f32 	%f33, %f89, 0f3F000000;
	sin.approx.f32 	%f90, %f25;
	sin.approx.f32 	%f91, %f32;
	mul.f32 	%f92, %f90, %f91;
	div.rn.f32 	%f302, %f92, %f33;
$L__BB320_12:
	mul.f32 	%f27, %f83, 0f40490FDB;
	setp.eq.f32 	%p7, %f26, 0f00000000;
	mov.f32 	%f303, %f304;
	@%p7 bra 	$L__BB320_14;
	mul.f32 	%f36, %f26, 0f3F000000;
	mul.f32 	%f94, %f26, %f26;
	mul.f32 	%f37, %f94, 0f3F000000;
	sin.approx.f32 	%f95, %f26;
	sin.approx.f32 	%f96, %f36;
	mul.f32 	%f97, %f95, %f96;
	div.rn.f32 	%f303, %f97, %f37;
$L__BB320_14:
	ld.param.u64 	%rd2, [_ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif_param_0];
	setp.eq.f32 	%p8, %f27, 0f00000000;
	@%p8 bra 	$L__BB320_16;
	mul.f32 	%f40, %f27, 0f3F000000;
	mul.f32 	%f99, %f27, %f27;
	mul.f32 	%f41, %f99, 0f3F000000;
	sin.approx.f32 	%f100, %f27;
	sin.approx.f32 	%f101, %f40;
	mul.f32 	%f102, %f100, %f101;
	div.rn.f32 	%f304, %f102, %f41;
$L__BB320_16:
	add.f32 	%f135, %f297, %f298;
	add.f32 	%f136, %f135, %f299;
	add.f32 	%f137, %f136, %f300;
	div.rn.f32 	%f138, %f300, %f137;
	div.rn.f32 	%f139, %f299, %f137;
	div.rn.f32 	%f140, %f298, %f137;
	div.rn.f32 	%f141, %f297, %f137;
	add.f32 	%f142, %f301, %f302;
	add.f32 	%f143, %f142, %f303;
	add.f32 	%f144, %f143, %f304;
	div.rn.f32 	%f145, %f301, %f144;
	div.rn.f32 	%f146, %f302, %f144;
	div.rn.f32 	%f147, %f303, %f144;
	div.rn.f32 	%f148, %f304, %f144;
	add.f32 	%f103, %f105, 0fBF800000;
	add.f32 	%f104, %f112, 0fBF800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r7, %r8, %r9, %r10}, [%rd2, {%f103, %f104}];
	// end inline asm
	mov.b32 	%f149, %r10;
	mov.b32 	%f150, %r9;
	mov.b32 	%f151, %r8;
	mov.b32 	%f152, %r7;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r11, %r12, %r13, %r14}, [%rd2, {%f105, %f104}];
	// end inline asm
	mov.b32 	%f153, %r14;
	mov.b32 	%f154, %r13;
	mov.b32 	%f155, %r12;
	mov.b32 	%f156, %r11;
	add.f32 	%f107, %f105, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r15, %r16, %r17, %r18}, [%rd2, {%f107, %f104}];
	// end inline asm
	mov.b32 	%f157, %r18;
	mov.b32 	%f158, %r17;
	mov.b32 	%f159, %r16;
	mov.b32 	%f160, %r15;
	add.f32 	%f109, %f105, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r19, %r20, %r21, %r22}, [%rd2, {%f109, %f104}];
	// end inline asm
	mov.b32 	%f161, %r22;
	mov.b32 	%f162, %r21;
	mov.b32 	%f163, %r20;
	mov.b32 	%f164, %r19;
	mul.f32 	%f165, %f140, %f156;
	mul.f32 	%f166, %f140, %f155;
	mul.f32 	%f167, %f140, %f154;
	mul.f32 	%f168, %f140, %f153;
	fma.rn.f32 	%f169, %f141, %f152, %f165;
	fma.rn.f32 	%f170, %f141, %f151, %f166;
	fma.rn.f32 	%f171, %f141, %f150, %f167;
	fma.rn.f32 	%f172, %f141, %f149, %f168;
	fma.rn.f32 	%f173, %f139, %f160, %f169;
	fma.rn.f32 	%f174, %f139, %f159, %f170;
	fma.rn.f32 	%f175, %f139, %f158, %f171;
	fma.rn.f32 	%f176, %f139, %f157, %f172;
	fma.rn.f32 	%f177, %f138, %f164, %f173;
	fma.rn.f32 	%f178, %f138, %f163, %f174;
	fma.rn.f32 	%f179, %f138, %f162, %f175;
	fma.rn.f32 	%f180, %f138, %f161, %f176;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r23, %r24, %r25, %r26}, [%rd2, {%f103, %f112}];
	// end inline asm
	mov.b32 	%f181, %r26;
	mov.b32 	%f182, %r25;
	mov.b32 	%f183, %r24;
	mov.b32 	%f184, %r23;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r27, %r28, %r29, %r30}, [%rd2, {%f105, %f112}];
	// end inline asm
	mov.b32 	%f185, %r30;
	mov.b32 	%f186, %r29;
	mov.b32 	%f187, %r28;
	mov.b32 	%f188, %r27;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r31, %r32, %r33, %r34}, [%rd2, {%f107, %f112}];
	// end inline asm
	mov.b32 	%f189, %r34;
	mov.b32 	%f190, %r33;
	mov.b32 	%f191, %r32;
	mov.b32 	%f192, %r31;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r35, %r36, %r37, %r38}, [%rd2, {%f109, %f112}];
	// end inline asm
	mov.b32 	%f193, %r38;
	mov.b32 	%f194, %r37;
	mov.b32 	%f195, %r36;
	mov.b32 	%f196, %r35;
	mul.f32 	%f197, %f140, %f188;
	mul.f32 	%f198, %f140, %f187;
	mul.f32 	%f199, %f140, %f186;
	mul.f32 	%f200, %f140, %f185;
	fma.rn.f32 	%f201, %f141, %f184, %f197;
	fma.rn.f32 	%f202, %f141, %f183, %f198;
	fma.rn.f32 	%f203, %f141, %f182, %f199;
	fma.rn.f32 	%f204, %f141, %f181, %f200;
	fma.rn.f32 	%f205, %f139, %f192, %f201;
	fma.rn.f32 	%f206, %f139, %f191, %f202;
	fma.rn.f32 	%f207, %f139, %f190, %f203;
	fma.rn.f32 	%f208, %f139, %f189, %f204;
	fma.rn.f32 	%f209, %f138, %f196, %f205;
	fma.rn.f32 	%f210, %f138, %f195, %f206;
	fma.rn.f32 	%f211, %f138, %f194, %f207;
	fma.rn.f32 	%f212, %f138, %f193, %f208;
	add.f32 	%f120, %f112, 0f3F800000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r39, %r40, %r41, %r42}, [%rd2, {%f103, %f120}];
	// end inline asm
	mov.b32 	%f213, %r42;
	mov.b32 	%f214, %r41;
	mov.b32 	%f215, %r40;
	mov.b32 	%f216, %r39;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r43, %r44, %r45, %r46}, [%rd2, {%f105, %f120}];
	// end inline asm
	mov.b32 	%f217, %r46;
	mov.b32 	%f218, %r45;
	mov.b32 	%f219, %r44;
	mov.b32 	%f220, %r43;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r47, %r48, %r49, %r50}, [%rd2, {%f107, %f120}];
	// end inline asm
	mov.b32 	%f221, %r50;
	mov.b32 	%f222, %r49;
	mov.b32 	%f223, %r48;
	mov.b32 	%f224, %r47;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r51, %r52, %r53, %r54}, [%rd2, {%f109, %f120}];
	// end inline asm
	mov.b32 	%f225, %r54;
	mov.b32 	%f226, %r53;
	mov.b32 	%f227, %r52;
	mov.b32 	%f228, %r51;
	mul.f32 	%f229, %f140, %f220;
	mul.f32 	%f230, %f140, %f219;
	mul.f32 	%f231, %f140, %f218;
	mul.f32 	%f232, %f140, %f217;
	fma.rn.f32 	%f233, %f141, %f216, %f229;
	fma.rn.f32 	%f234, %f141, %f215, %f230;
	fma.rn.f32 	%f235, %f141, %f214, %f231;
	fma.rn.f32 	%f236, %f141, %f213, %f232;
	fma.rn.f32 	%f237, %f139, %f224, %f233;
	fma.rn.f32 	%f238, %f139, %f223, %f234;
	fma.rn.f32 	%f239, %f139, %f222, %f235;
	fma.rn.f32 	%f240, %f139, %f221, %f236;
	fma.rn.f32 	%f241, %f138, %f228, %f237;
	fma.rn.f32 	%f242, %f138, %f227, %f238;
	fma.rn.f32 	%f243, %f138, %f226, %f239;
	fma.rn.f32 	%f244, %f138, %f225, %f240;
	add.f32 	%f128, %f112, 0f40000000;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r55, %r56, %r57, %r58}, [%rd2, {%f103, %f128}];
	// end inline asm
	mov.b32 	%f245, %r58;
	mov.b32 	%f246, %r57;
	mov.b32 	%f247, %r56;
	mov.b32 	%f248, %r55;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r59, %r60, %r61, %r62}, [%rd2, {%f105, %f128}];
	// end inline asm
	mov.b32 	%f249, %r62;
	mov.b32 	%f250, %r61;
	mov.b32 	%f251, %r60;
	mov.b32 	%f252, %r59;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r63, %r64, %r65, %r66}, [%rd2, {%f107, %f128}];
	// end inline asm
	mov.b32 	%f253, %r66;
	mov.b32 	%f254, %r65;
	mov.b32 	%f255, %r64;
	mov.b32 	%f256, %r63;
	// begin inline asm
	tex.2d.v4.f32.f32 {%r67, %r68, %r69, %r70}, [%rd2, {%f109, %f128}];
	// end inline asm
	mov.b32 	%f257, %r70;
	mov.b32 	%f258, %r69;
	mov.b32 	%f259, %r68;
	mov.b32 	%f260, %r67;
	mul.f32 	%f261, %f140, %f252;
	mul.f32 	%f262, %f140, %f251;
	mul.f32 	%f263, %f140, %f250;
	mul.f32 	%f264, %f140, %f249;
	fma.rn.f32 	%f265, %f141, %f248, %f261;
	fma.rn.f32 	%f266, %f141, %f247, %f262;
	fma.rn.f32 	%f267, %f141, %f246, %f263;
	fma.rn.f32 	%f268, %f141, %f245, %f264;
	fma.rn.f32 	%f269, %f139, %f256, %f265;
	fma.rn.f32 	%f270, %f139, %f255, %f266;
	fma.rn.f32 	%f271, %f139, %f254, %f267;
	fma.rn.f32 	%f272, %f139, %f253, %f268;
	fma.rn.f32 	%f273, %f138, %f260, %f269;
	fma.rn.f32 	%f274, %f138, %f259, %f270;
	fma.rn.f32 	%f275, %f138, %f258, %f271;
	fma.rn.f32 	%f276, %f138, %f257, %f272;
	mul.f32 	%f277, %f146, %f209;
	mul.f32 	%f278, %f146, %f210;
	mul.f32 	%f279, %f146, %f211;
	mul.f32 	%f280, %f146, %f212;
	fma.rn.f32 	%f281, %f145, %f177, %f277;
	fma.rn.f32 	%f282, %f145, %f178, %f278;
	fma.rn.f32 	%f283, %f145, %f179, %f279;
	fma.rn.f32 	%f284, %f145, %f180, %f280;
	fma.rn.f32 	%f285, %f147, %f241, %f281;
	fma.rn.f32 	%f286, %f147, %f242, %f282;
	fma.rn.f32 	%f287, %f147, %f243, %f283;
	fma.rn.f32 	%f288, %f147, %f244, %f284;
	fma.rn.f32 	%f289, %f148, %f273, %f285;
	fma.rn.f32 	%f290, %f148, %f274, %f286;
	fma.rn.f32 	%f291, %f148, %f275, %f287;
	fma.rn.f32 	%f292, %f148, %f276, %f288;
	mul.f32 	%f293, %f289, 0f437F0000;
	mul.f32 	%f294, %f290, 0f437F0000;
	mul.f32 	%f295, %f291, 0f437F0000;
	mul.f32 	%f296, %f292, 0f437F0000;
	cvt.rzi.u16.f32 	%rs1, %f293;
	cvt.rzi.u16.f32 	%rs2, %f294;
	cvt.rzi.u16.f32 	%rs3, %f295;
	cvt.rzi.u16.f32 	%rs4, %f296;
	st.param.v4.b8 	[func_retval0+0], {%rs1, %rs2, %rs3, %rs4};
	ret;

}
e_Bicubic_p010le_p016le_uv.nv.constant0.Subsample_Bicubic_p010le_p016le_uv.rel.nv.constant0.Subsample_Bicubic_p010le_p016le_uv.text.Subsample_Bicubic_p010le_p016le.nv.info.Subsample_Bicubic_p010le_p016le.nv.shared.Subsample_Bicubic_p010le_p016le.nv.constant2.Subsample_Bicubic_p010le_p016le.nv.constant0.Subsample_Bicubic_p010le_p016le.rel.nv.constant0.Subsample_Bicubic_p010le_p016le.text.Subsample_Bicubic_yuv444p_p016le_uv.nv.info.Subsample_Bicubic_yuv444p_p016le_uv.nv.shared.Subsample_Bicubic_yuv444p_p016le_uv.nv.constant2.Subsample_Bicubic_yuv444p_p016le_uv.nv.constant0.Subsample_Bicubic_yuv444p_p016le_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p_p016le_uv.text.Subsample_Bicubic_yuv444p_p016le.nv.info.Subsample_Bicubic_yuv444p_p016le.nv.shared.Subsample_Bicubic_yuv444p_p016le.nv.constant2.Subsample_Bicubic_yuv444p_p016le.nv.constant0.Subsample_Bicubic_yuv444p_p016le.rel.nv.constant0.Subsample_Bicubic_yuv444p_p016le.text.Subsample_Bicubic_nv12_p016le_uv.nv.info.Subsample_Bicubic_nv12_p016le_uv.nv.shared.Subsample_Bicubic_nv12_p016le_uv.nv.constant2.Subsample_Bicubic_nv12_p016le_uv.nv.constant0.Subsample_Bicubic_nv12_p016le_uv.rel.nv.constant0.Subsample_Bicubic_nv12_p016le_uv.text.Subsample_Bicubic_nv12_p016le.nv.info.Subsample_Bicubic_nv12_p016le.nv.shared.Subsample_Bicubic_nv12_p016le.nv.constant2.Subsample_Bicubic_nv12_p016le.nv.constant0.Subsample_Bicubic_nv12_p016le.rel.nv.constant0.Subsample_Bicubic_nv12_p016le.text.Subsample_Bicubic_yuv420p_p016le_uv.nv.info.Subsample_Bicubic_yuv420p_p016le_uv.nv.shared.Subsample_Bicubic_yuv420p_p016le_uv.nv.constant2.Subsample_Bicubic_yuv420p_p016le_uv.nv.constant0.Subsample_Bicubic_yuv420p_p016le_uv.rel.nv.constant0.Subsample_Bicubic_yuv420p_p016le_uv.text.Subsample_Bicubic_yuv420p_p016le.nv.info.Subsample_Bicubic_yuv420p_p016le.nv.shared.Subsample_Bicubic_yuv420p_p016le.nv.constant2.Subsample_Bicubic_yuv420p_p016le.nv.constant0.Subsample_Bicubic_yuv420p_p016le.rel.nv.constant0.Subsample_Bicubic_yuv420p_p016le.text.Subsample_Bicubic_yuv444p16le_p010le_uv.nv.info.Subsample_Bicubic_yuv444p16le_p010le_uv.nv.shared.Subsample_Bicubic_yuv444p16le_p010le_uv.nv.constant2.Subsample_Bicubic_yuv444p16le_p010le_uv.nv.constant0.Subsample_Bicubic_yuv444p16le_p010le_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_p010le_uv.text.Subsample_Bicubic_yuv444p16le_p010le.nv.info.Subsample_Bicubic_yuv444p16le_p010le.nv.shared.Subsample_Bicubic_yuv444p16le_p010le.nv.constant2.Subsample_Bicubic_yuv444p16le_p010le.nv.constant0.Subsample_Bicubic_yuv444p16le_p010le.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_p010le.text.Subsample_Bicubic_p016le_p010le_uv.nv.info.Subsample_Bicubic_p016le_p010le_uv.nv.shared.Subsample_Bicubic_p016le_p010le_uv.nv.constant2.Subsample_Bicubic_p016le_p010le_uv.nv.constant0.Subsample_Bicubic_p016le_p010le_uv.rel.nv.constant0.Subsample_Bicubic_p016le_p010le_uv.text.Subsample_Bicubic_p016le_p010le.nv.info.Subsample_Bicubic_p016le_p010le.nv.shared.Subsample_Bicubic_p016le_p010le.nv.constant2.Subsample_Bicubic_p016le_p010le.nv.constant0.Subsample_Bicubic_p016le_p010le.rel.nv.constant0.Subsample_Bicubic_p016le_p010le.text.Subsample_Bicubic_p010le_p010le_uv.nv.info.Subsample_Bicubic_p010le_p010le_uv.nv.shared.Subsample_Bicubic_p010le_p010le_uv.nv.constant2.Subsample_Bicubic_p010le_p010le_uv.nv.constant0.Subsample_Bicubic_p010le_p010le_uv.rel.nv.constant0.Subsample_Bicubic_p010le_p010le_uv.text.Subsample_Bicubic_p010le_p010le.nv.info.Subsample_Bicubic_p010le_p010le.nv.shared.Subsample_Bicubic_p010le_p010le.nv.constant2.Subsample_Bicubic_p010le_p010le.
nv.constant0.Subsample_Bicubic_p010le_p010le.rel.nv.constant0.Subsample_Bicubic_p010le_p010le.text.Subsample_Bicubic_yuv444p_p010le_uv.nv.info.Subsample_Bicubic_yuv444p_p010le_uv.nv.shared.Subsample_Bicubic_yuv444p_p010le_uv.nv.constant2.Subsample_Bicubic_yuv444p_p010le_uv.nv.constant0.Subsample_Bicubic_yuv444p_p010le_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p_p010le_uv.text.Subsample_Bicubic_yuv444p_p010le.nv.info.Subsample_Bicubic_yuv444p_p010le.nv.shared.Subsample_Bicubic_yuv444p_p010le.nv.constant2.Subsample_Bicubic_yuv444p_p010le.nv.constant0.Subsample_Bicubic_yuv444p_p010le.rel.nv.constant0.Subsample_Bicubic_yuv444p_p010le.text.Subsample_Bicubic_nv12_p010le_uv.nv.info.Subsample_Bicubic_nv12_p010le_uv.nv.shared.Subsample_Bicubic_nv12_p010le_uv.nv.constant2.Subsample_Bicubic_nv12_p010le_uv.nv.constant0.Subsample_Bicubic_nv12_p010le_uv.rel.nv.constant0.Subsample_Bicubic_nv12_p010le_uv.text.Subsample_Bicubic_nv12_p010le.nv.info.Subsample_Bicubic_nv12_p010le.nv.shared.Subsample_Bicubic_nv12_p010le.nv.constant2.Subsample_Bicubic_nv12_p010le.nv.constant0.Subsample_Bicubic_nv12_p010le.rel.nv.constant0.Subsample_Bicubic_nv12_p010le.text.Subsample_Bicubic_yuv420p_p010le_uv.nv.info.Subsample_Bicubic_yuv420p_p010le_uv.nv.shared.Subsample_Bicubic_yuv420p_p010le_uv.nv.constant2.Subsample_Bicubic_yuv420p_p010le_uv.nv.constant0.Subsample_Bicubic_yuv420p_p010le_uv.rel.nv.constant0.Subsample_Bicubic_yuv420p_p010le_uv.text.Subsample_Bicubic_yuv420p_p010le.nv.info.Subsample_Bicubic_yuv420p_p010le.nv.shared.Subsample_Bicubic_yuv420p_p010le.nv.constant2.Subsample_Bicubic_yuv420p_p010le.nv.constant0.Subsample_Bicubic_yuv420p_p010le.rel.nv.constant0.Subsample_Bicubic_yuv420p_p010le.text.Subsample_Bicubic_yuv444p16le_yuv444p_uv.nv.info.Subsample_Bicubic_yuv444p16le_yuv444p_uv.nv.shared.Subsample_Bicubic_yuv444p16le_yuv444p_uv.nv.constant2.Subsample_Bicubic_yuv444p16le_yuv444p_uv.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv444p_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv444p_uv.text.Subsample_Bicubic_yuv444p16le_yuv444p.nv.info.Subsample_Bicubic_yuv444p16le_yuv444p.nv.shared.Subsample_Bicubic_yuv444p16le_yuv444p.nv.constant2.Subsample_Bicubic_yuv444p16le_yuv444p.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv444p.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv444p.text.Subsample_Bicubic_p016le_yuv444p_uv.nv.info.Subsample_Bicubic_p016le_yuv444p_uv.nv.shared.Subsample_Bicubic_p016le_yuv444p_uv.nv.constant2.Subsample_Bicubic_p016le_yuv444p_uv.nv.constant0.Subsample_Bicubic_p016le_yuv444p_uv.rel.nv.constant0.Subsample_Bicubic_p016le_yuv444p_uv.text.Subsample_Bicubic_p016le_yuv444p.nv.info.Subsample_Bicubic_p016le_yuv444p.nv.shared.Subsample_Bicubic_p016le_yuv444p.nv.constant2.Subsample_Bicubic_p016le_yuv444p.nv.constant0.Subsample_Bicubic_p016le_yuv444p.rel.nv.constant0.Subsample_Bicubic_p016le_yuv444p.text.Subsample_Bicubic_p010le_yuv444p_uv.nv.info.Subsample_Bicubic_p010le_yuv444p_uv.nv.shared.Subsample_Bicubic_p010le_yuv444p_uv.nv.constant2.Subsample_Bicubic_p010le_yuv444p_uv.nv.constant0.Subsample_Bicubic_p010le_yuv444p_uv.rel.nv.constant0.Subsample_Bicubic_p010le_yuv444p_uv.text.Subsample_Bicubic_p010le_yuv444p.nv.info.Subsample_Bicubic_p010le_yuv444p.nv.shared.Subsample_Bicubic_p010le_yuv444p.nv.constant2.Subsample_Bicubic_p010le_yuv444p.nv.constant0.Subsample_Bicubic_p010le_yuv444p.rel.nv.constant0.Subsample_Bicubic_p010le_yuv444p.text.Subsample_Bicubic_yuv444p_yuv444p_uv.nv.info.Subsample_Bicubic_yuv444p_yuv444p_uv.nv.shared.Subsample_Bicubic_yuv444p_yuv444p_uv.nv.constant2.Subsample_Bicu
bic_yuv444p_yuv444p_uv.nv.constant0.Subsample_Bicubic_yuv444p_yuv444p_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p_yuv444p_uv.text.Subsample_Bicubic_yuv444p_yuv444p.nv.info.Subsample_Bicubic_yuv444p_yuv444p.nv.shared.Subsample_Bicubic_yuv444p_yuv444p.nv.constant2.Subsample_Bicubic_yuv444p_yuv444p.nv.constant0.Subsample_Bicubic_yuv444p_yuv444p.rel.nv.constant0.Subsample_Bicubic_yuv444p_yuv444p.text.Subsample_Bicubic_nv12_yuv444p_uv.nv.info.Subsample_Bicubic_nv12_yuv444p_uv.nv.shared.Subsample_Bicubic_nv12_yuv444p_uv.nv.constant2.Subsample_Bicubic_nv12_yuv444p_uv.nv.constant0.Subsample_Bicubic_nv12_yuv444p_uv.rel.nv.constant0.Subsample_Bicubic_nv12_yuv444p_uv.text.Subsample_Bicubic_nv12_yuv444p.nv.info.Subsample_Bicubic_nv12_yuv444p.nv.shared.Subsample_Bicubic_nv12_yuv444p.nv.constant2.Subsample_Bicubic_nv12_yuv444p.nv.constant0.Subsample_Bicubic_nv12_yuv444p.rel.nv.constant0.Subsample_Bicubic_nv12_yuv444p.text.Subsample_Bicubic_yuv420p_yuv444p_uv.nv.info.Subsample_Bicubic_yuv420p_yuv444p_uv.nv.shared.Subsample_Bicubic_yuv420p_yuv444p_uv.nv.constant2.Subsample_Bicubic_yuv420p_yuv444p_uv.nv.constant0.Subsample_Bicubic_yuv420p_yuv444p_uv.rel.nv.constant0.Subsample_Bicubic_yuv420p_yuv444p_uv.text.Subsample_Bicubic_yuv420p_yuv444p.nv.info.Subsample_Bicubic_yuv420p_yuv444p.nv.shared.Subsample_Bicubic_yuv420p_yuv444p.nv.constant2.Subsample_Bicubic_yuv420p_yuv444p.nv.constant0.Subsample_Bicubic_yuv420p_yuv444p.rel.nv.constant0.Subsample_Bicubic_yuv420p_yuv444p.text.Subsample_Bicubic_yuv444p16le_nv12_uv.nv.info.Subsample_Bicubic_yuv444p16le_nv12_uv.nv.shared.Subsample_Bicubic_yuv444p16le_nv12_uv.nv.constant2.Subsample_Bicubic_yuv444p16le_nv12_uv.nv.constant0.Subsample_Bicubic_yuv444p16le_nv12_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_nv12_uv.text.Subsample_Bicubic_yuv444p16le_nv12.nv.info.Subsample_Bicubic_yuv444p16le_nv12.nv.shared.Subsample_Bicubic_yuv444p16le_nv12.nv.constant2.Subsample_Bicubic_yuv444p16le_nv12.nv.constant0.Subsample_Bicubic_yuv444p16le_nv12.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_nv12.text.Subsample_Bicubic_p016le_nv12_uv.nv.info.Subsample_Bicubic_p016le_nv12_uv.nv.shared.Subsample_Bicubic_p016le_nv12_uv.nv.constant2.Subsample_Bicubic_p016le_nv12_uv.nv.constant0.Subsample_Bicubic_p016le_nv12_uv.rel.nv.constant0.Subsample_Bicubic_p016le_nv12_uv.text.Subsample_Bicubic_p016le_nv12.nv.info.Subsample_Bicubic_p016le_nv12.nv.shared.Subsample_Bicubic_p016le_nv12.nv.constant2.Subsample_Bicubic_p016le_nv12.nv.constant0.Subsample_Bicubic_p016le_nv12.rel.nv.constant0.Subsample_Bicubic_p016le_nv12.text.Subsample_Bicubic_p010le_nv12_uv.nv.info.Subsample_Bicubic_p010le_nv12_uv.nv.shared.Subsample_Bicubic_p010le_nv12_uv.nv.constant2.Subsample_Bicubic_p010le_nv12_uv.nv.constant0.Subsample_Bicubic_p010le_nv12_uv.rel.nv.constant0.Subsample_Bicubic_p010le_nv12_uv.text.Subsample_Bicubic_p010le_nv12.nv.info.Subsample_Bicubic_p010le_nv12.nv.shared.Subsample_Bicubic_p010le_nv12.nv.constant2.Subsample_Bicubic_p010le_nv12.nv.constant0.Subsample_Bicubic_p010le_nv12.rel.nv.constant0.Subsample_Bicubic_p010le_nv12.text.Subsample_Bicubic_yuv444p_nv12_uv.nv.info.Subsample_Bicubic_yuv444p_nv12_uv.nv.shared.Subsample_Bicubic_yuv444p_nv12_uv.nv.constant2.Subsample_Bicubic_yuv444p_nv12_uv.nv.constant0.Subsample_Bicubic_yuv444p_nv12_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p_nv12_uv.text.Subsample_Bicubic_yuv444p_nv12.nv.info.Subsample_Bicubic_yuv444p_nv12.nv.shared.Subsample_Bicubic_yuv444p_nv12.nv.constant2.Subsample_Bicubic_yuv444p_nv12.nv.constant0.Subsample_Bicubic_yuv444p_nv12.rel.nv.constan
t0.Subsample_Bicubic_yuv444p_nv12.text.Subsample_Bicubic_nv12_nv12_uv.nv.info.Subsample_Bicubic_nv12_nv12_uv.nv.shared.Subsample_Bicubic_nv12_nv12_uv.nv.constant2.Subsample_Bicubic_nv12_nv12_uv.nv.constant0.Subsample_Bicubic_nv12_nv12_uv.rel.nv.constant0.Subsample_Bicubic_nv12_nv12_uv.text.Subsample_Bicubic_nv12_nv12.nv.info.Subsample_Bicubic_nv12_nv12.nv.shared.Subsample_Bicubic_nv12_nv12.nv.constant2.Subsample_Bicubic_nv12_nv12.nv.constant0.Subsample_Bicubic_nv12_nv12.rel.nv.constant0.Subsample_Bicubic_nv12_nv12.text.Subsample_Bicubic_yuv420p_nv12_uv.nv.info.Subsample_Bicubic_yuv420p_nv12_uv.nv.shared.Subsample_Bicubic_yuv420p_nv12_uv.nv.constant2.Subsample_Bicubic_yuv420p_nv12_uv.nv.constant0.Subsample_Bicubic_yuv420p_nv12_uv.rel.nv.constant0.Subsample_Bicubic_yuv420p_nv12_uv.text.Subsample_Bicubic_yuv420p_nv12.nv.info.Subsample_Bicubic_yuv420p_nv12.nv.shared.Subsample_Bicubic_yuv420p_nv12.nv.constant2.Subsample_Bicubic_yuv420p_nv12.nv.constant0.Subsample_Bicubic_yuv420p_nv12.rel.nv.constant0.Subsample_Bicubic_yuv420p_nv12.text.Subsample_Bicubic_yuv444p16le_yuv420p_uv.nv.info.Subsample_Bicubic_yuv444p16le_yuv420p_uv.nv.shared.Subsample_Bicubic_yuv444p16le_yuv420p_uv.nv.constant2.Subsample_Bicubic_yuv444p16le_yuv420p_uv.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv420p_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv420p_uv.text.Subsample_Bicubic_yuv444p16le_yuv420p.nv.info.Subsample_Bicubic_yuv444p16le_yuv420p.nv.shared.Subsample_Bicubic_yuv444p16le_yuv420p.nv.constant2.Subsample_Bicubic_yuv444p16le_yuv420p.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv420p.rel.nv.constant0.Subsample_Bicubic_yuv444p16le_yuv420p.text.Subsample_Bicubic_p016le_yuv420p_uv.nv.info.Subsample_Bicubic_p016le_yuv420p_uv.nv.shared.Subsample_Bicubic_p016le_yuv420p_uv.nv.constant2.Subsample_Bicubic_p016le_yuv420p_uv.nv.constant0.Subsample_Bicubic_p016le_yuv420p_uv.rel.nv.constant0.Subsample_Bicubic_p016le_yuv420p_uv.text.Subsample_Bicubic_p016le_yuv420p.nv.info.Subsample_Bicubic_p016le_yuv420p.nv.shared.Subsample_Bicubic_p016le_yuv420p.nv.constant2.Subsample_Bicubic_p016le_yuv420p.nv.constant0.Subsample_Bicubic_p016le_yuv420p.rel.nv.constant0.Subsample_Bicubic_p016le_yuv420p.text.Subsample_Bicubic_p010le_yuv420p_uv.nv.info.Subsample_Bicubic_p010le_yuv420p_uv.nv.shared.Subsample_Bicubic_p010le_yuv420p_uv.nv.constant2.Subsample_Bicubic_p010le_yuv420p_uv.nv.constant0.Subsample_Bicubic_p010le_yuv420p_uv.rel.nv.constant0.Subsample_Bicubic_p010le_yuv420p_uv.text.Subsample_Bicubic_p010le_yuv420p.nv.info.Subsample_Bicubic_p010le_yuv420p.nv.shared.Subsample_Bicubic_p010le_yuv420p.nv.constant2.Subsample_Bicubic_p010le_yuv420p.nv.constant0.Subsample_Bicubic_p010le_yuv420p.rel.nv.constant0.Subsample_Bicubic_p010le_yuv420p.text.Subsample_Bicubic_yuv444p_yuv420p_uv.nv.info.Subsample_Bicubic_yuv444p_yuv420p_uv.nv.shared.Subsample_Bicubic_yuv444p_yuv420p_uv.nv.constant2.Subsample_Bicubic_yuv444p_yuv420p_uv.nv.constant0.Subsample_Bicubic_yuv444p_yuv420p_uv.rel.nv.constant0.Subsample_Bicubic_yuv444p_yuv420p_uv.text.Subsample_Bicubic_yuv444p_yuv420p.nv.info.Subsample_Bicubic_yuv444p_yuv420p.nv.shared.Subsample_Bicubic_yuv444p_yuv420p.nv.constant2.Subsample_Bicubic_yuv444p_yuv420p.nv.constant0.Subsample_Bicubic_yuv444p_yuv420p.rel.nv.constant0.Subsample_Bicubic_yuv444p_yuv420p.text.Subsample_Bicubic_nv12_yuv420p_uv.nv.info.Subsample_Bicubic_nv12_yuv420p_uv.nv.shared.Subsample_Bicubic_nv12_yuv420p_uv.nv.constant2.Subsample_Bicubic_nv12_yuv420p_uv.nv.constant0.Subsample_Bicubic_nv12_yuv420p_uv.rel.nv.constant0.Subsample_Bicubic_nv1
2_yuv420p_uv.text.Subsample_Bicubic_nv12_yuv420p.nv.info.Subsample_Bicubic_nv12_yuv420p.nv.shared.Subsample_Bicubic_nv12_yuv420p.nv.constant2.Subsample_Bicubic_nv12_yuv420p.nv.constant0.Subsample_Bicubic_nv12_yuv420p.rel.nv.constant0.Subsample_Bicubic_nv12_yuv420p.text.Subsample_Bicubic_yuv420p_yuv420p_uv.nv.info.Subsample_Bicubic_yuv420p_yuv420p_uv.nv.shared.Subsample_Bicubic_yuv420p_yuv420p_uv.nv.constant2.Subsample_Bicubic_yuv420p_yuv420p_uv.nv.constant0.Subsample_Bicubic_yuv420p_yuv420p_uv.rel.nv.constant0.Subsample_Bicubic_yuv420p_yuv420p_uv.text.Subsample_Bicubic_yuv420p_yuv420p.nv.info.Subsample_Bicubic_yuv420p_yuv420p.nv.shared.Subsample_Bicubic_yuv420p_yuv420p.nv.constant2.Subsample_Bicubic_yuv420p_yuv420p.nv.constant0.Subsample_Bicubic_yuv420p_yuv420p.rel.nv.constant0.Subsample_Bicubic_yuv420p_yuv420p.text.Subsample_Bilinear_rgb0_bgr0_uv.nv.info.Subsample_Bilinear_rgb0_bgr0_uv.nv.shared.Subsample_Bilinear_rgb0_bgr0_uv.nv.constant0.Subsample_Bilinear_rgb0_bgr0_uv.rel.nv.constant0.Subsample_Bilinear_rgb0_bgr0_uv.text.Subsample_Bilinear_rgb0_bgr0.nv.info.Subsample_Bilinear_rgb0_bgr0.nv.shared.Subsample_Bilinear_rgb0_bgr0.nv.constant2.Subsample_Bilinear_rgb0_bgr0.nv.constant0.Subsample_Bilinear_rgb0_bgr0.rel.nv.constant0.Subsample_Bilinear_rgb0_bgr0.text.Subsample_Bilinear_bgr0_rgb0_uv.nv.info.Subsample_Bilinear_bgr0_rgb0_uv.nv.shared.Subsample_Bilinear_bgr0_rgb0_uv.nv.constant0.Subsample_Bilinear_bgr0_rgb0_uv.rel.nv.constant0.Subsample_Bilinear_bgr0_rgb0_uv.text.Subsample_Bilinear_bgr0_rgb0.nv.info.Subsample_Bilinear_bgr0_rgb0.nv.shared.Subsample_Bilinear_bgr0_rgb0.nv.constant2.Subsample_Bilinear_bgr0_rgb0.nv.constant0.Subsample_Bilinear_bgr0_rgb0.rel.nv.constant0.Subsample_Bilinear_bgr0_rgb0.text.Subsample_Bilinear_rgb0_rgb0_uv.nv.info.Subsample_Bilinear_rgb0_rgb0_uv.nv.shared.Subsample_Bilinear_rgb0_rgb0_uv.nv.constant0.Subsample_Bilinear_rgb0_rgb0_uv.rel.nv.constant0.Subsample_Bilinear_rgb0_rgb0_uv.text.Subsample_Bilinear_rgb0_rgb0.nv.info.Subsample_Bilinear_rgb0_rgb0.nv.shared.Subsample_Bilinear_rgb0_rgb0.nv.constant2.Subsample_Bilinear_rgb0_rgb0.nv.constant0.Subsample_Bilinear_rgb0_rgb0.rel.nv.constant0.Subsample_Bilinear_rgb0_rgb0.text.Subsample_Bilinear_bgr0_bgr0_uv.nv.info.Subsample_Bilinear_bgr0_bgr0_uv.nv.shared.Subsample_Bilinear_bgr0_bgr0_uv.nv.constant0.Subsample_Bilinear_bgr0_bgr0_uv.rel.nv.constant0.Subsample_Bilinear_bgr0_bgr0_uv.text.Subsample_Bilinear_bgr0_bgr0.nv.info.Subsample_Bilinear_bgr0_bgr0.nv.shared.Subsample_Bilinear_bgr0_bgr0.nv.constant2.Subsample_Bilinear_bgr0_bgr0.nv.constant0.Subsample_Bilinear_bgr0_bgr0.rel.nv.constant0.Subsample_Bilinear_bgr0_bgr0.text.Subsample_Bilinear_yuv444p16le_yuv444p16le_uv.nv.info.Subsample_Bilinear_yuv444p16le_yuv444p16le_uv.nv.shared.Subsample_Bilinear_yuv444p16le_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p16le_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p16le_uv.text.Subsample_Bilinear_yuv444p16le_yuv444p16le.nv.info.Subsample_Bilinear_yuv444p16le_yuv444p16le.nv.shared.Subsample_Bilinear_yuv444p16le_yuv444p16le.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv444p16le.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p16le.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p16le.text.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.info.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.shared.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le_uv.
rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le_uv.text.Subsample_Bilinear_p016le_yuv444p16le.nv.info.Subsample_Bilinear_p016le_yuv444p16le.nv.shared.Subsample_Bilinear_p016le_yuv444p16le.nv.constant2.Subsample_Bilinear_p016le_yuv444p16le.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le.text.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.info.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.shared.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le_uv.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le_uv.text.Subsample_Bilinear_p010le_yuv444p16le.nv.info.Subsample_Bilinear_p010le_yuv444p16le.nv.shared.Subsample_Bilinear_p010le_yuv444p16le.nv.constant2.Subsample_Bilinear_p010le_yuv444p16le.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le.text.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.info.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.shared.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le_uv.text.Subsample_Bilinear_yuv444p_yuv444p16le.nv.info.Subsample_Bilinear_yuv444p_yuv444p16le.nv.shared.Subsample_Bilinear_yuv444p_yuv444p16le.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p16le.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le.text.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.info.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.shared.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le_uv.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le_uv.text.Subsample_Bilinear_nv12_yuv444p16le.nv.info.Subsample_Bilinear_nv12_yuv444p16le.nv.shared.Subsample_Bilinear_nv12_yuv444p16le.nv.constant2.Subsample_Bilinear_nv12_yuv444p16le.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le.text.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.info.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.shared.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le_uv.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le_uv.text.Subsample_Bilinear_yuv420p_yuv444p16le.nv.info.Subsample_Bilinear_yuv420p_yuv444p16le.nv.shared.Subsample_Bilinear_yuv420p_yuv444p16le.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p16le.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le.text.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.info.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.shared.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le_uv.text.Subsample_Bilinear_yuv444p16le_p016le.nv.info.Subsample_Bilinear_yuv444p16le_p016le.nv.shared.Subsample_Bilinear_yuv444p16le_p016le.nv.constant2.Subsample_Bilinear_yuv444p16le_p016le.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le.text.Subsample_Bilinear_p016le_p016le_uv.nv.info.Subsample_Bilinear_p016le_p016le_uv.nv.shared.Subsample_Bilinear_p016le_p016le_uv.nv.constant2.Subsample_Bilinear_p016le_p01
6le_uv.nv.constant0.Subsample_Bilinear_p016le_p016le_uv.rel.nv.constant0.Subsample_Bilinear_p016le_p016le_uv.text.Subsample_Bilinear_p016le_p016le.nv.info.Subsample_Bilinear_p016le_p016le.nv.shared.Subsample_Bilinear_p016le_p016le.nv.constant2.Subsample_Bilinear_p016le_p016le.nv.constant0.Subsample_Bilinear_p016le_p016le.rel.nv.constant0.Subsample_Bilinear_p016le_p016le.text.Subsample_Bilinear_p010le_p016le_uv.nv.info.Subsample_Bilinear_p010le_p016le_uv.nv.shared.Subsample_Bilinear_p010le_p016le_uv.nv.constant2.Subsample_Bilinear_p010le_p016le_uv.nv.constant0.Subsample_Bilinear_p010le_p016le_uv.rel.nv.constant0.Subsample_Bilinear_p010le_p016le_uv.text.Subsample_Bilinear_p010le_p016le.nv.info.Subsample_Bilinear_p010le_p016le.nv.shared.Subsample_Bilinear_p010le_p016le.nv.constant2.Subsample_Bilinear_p010le_p016le.nv.constant0.Subsample_Bilinear_p010le_p016le.rel.nv.constant0.Subsample_Bilinear_p010le_p016le.text.Subsample_Bilinear_yuv444p_p016le_uv.nv.info.Subsample_Bilinear_yuv444p_p016le_uv.nv.shared.Subsample_Bilinear_yuv444p_p016le_uv.nv.constant2.Subsample_Bilinear_yuv444p_p016le_uv.nv.constant0.Subsample_Bilinear_yuv444p_p016le_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p_p016le_uv.text.Subsample_Bilinear_yuv444p_p016le.nv.info.Subsample_Bilinear_yuv444p_p016le.nv.shared.Subsample_Bilinear_yuv444p_p016le.nv.constant2.Subsample_Bilinear_yuv444p_p016le.nv.constant0.Subsample_Bilinear_yuv444p_p016le.rel.nv.constant0.Subsample_Bilinear_yuv444p_p016le.text.Subsample_Bilinear_nv12_p016le_uv.nv.info.Subsample_Bilinear_nv12_p016le_uv.nv.shared.Subsample_Bilinear_nv12_p016le_uv.nv.constant2.Subsample_Bilinear_nv12_p016le_uv.nv.constant0.Subsample_Bilinear_nv12_p016le_uv.rel.nv.constant0.Subsample_Bilinear_nv12_p016le_uv.text.Subsample_Bilinear_nv12_p016le.nv.info.Subsample_Bilinear_nv12_p016le.nv.shared.Subsample_Bilinear_nv12_p016le.nv.constant2.Subsample_Bilinear_nv12_p016le.nv.constant0.Subsample_Bilinear_nv12_p016le.rel.nv.constant0.Subsample_Bilinear_nv12_p016le.text.Subsample_Bilinear_yuv420p_p016le_uv.nv.info.Subsample_Bilinear_yuv420p_p016le_uv.nv.shared.Subsample_Bilinear_yuv420p_p016le_uv.nv.constant2.Subsample_Bilinear_yuv420p_p016le_uv.nv.constant0.Subsample_Bilinear_yuv420p_p016le_uv.rel.nv.constant0.Subsample_Bilinear_yuv420p_p016le_uv.text.Subsample_Bilinear_yuv420p_p016le.nv.info.Subsample_Bilinear_yuv420p_p016le.nv.shared.Subsample_Bilinear_yuv420p_p016le.nv.constant2.Subsample_Bilinear_yuv420p_p016le.nv.constant0.Subsample_Bilinear_yuv420p_p016le.rel.nv.constant0.Subsample_Bilinear_yuv420p_p016le.text.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.info.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.shared.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le_uv.text.Subsample_Bilinear_yuv444p16le_p010le.nv.info.Subsample_Bilinear_yuv444p16le_p010le.nv.shared.Subsample_Bilinear_yuv444p16le_p010le.nv.constant2.Subsample_Bilinear_yuv444p16le_p010le.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le.text.Subsample_Bilinear_p016le_p010le_uv.nv.info.Subsample_Bilinear_p016le_p010le_uv.nv.shared.Subsample_Bilinear_p016le_p010le_uv.nv.constant2.Subsample_Bilinear_p016le_p010le_uv.nv.constant0.Subsample_Bilinear_p016le_p010le_uv.rel.nv.constant0.Subsample_Bilinear_p016le_p010le_uv.text.Subsample_Bilinear_p016le_p010le.nv.info.Subsample_Bilinear_p016le_p010le.nv.shared.Subsample_Bilinear_p0
16le_p010le.nv.constant2.Subsample_Bilinear_p016le_p010le.nv.constant0.Subsample_Bilinear_p016le_p010le.rel.nv.constant0.Subsample_Bilinear_p016le_p010le.text.Subsample_Bilinear_p010le_p010le_uv.nv.info.Subsample_Bilinear_p010le_p010le_uv.nv.shared.Subsample_Bilinear_p010le_p010le_uv.nv.constant2.Subsample_Bilinear_p010le_p010le_uv.nv.constant0.Subsample_Bilinear_p010le_p010le_uv.rel.nv.constant0.Subsample_Bilinear_p010le_p010le_uv.text.Subsample_Bilinear_p010le_p010le.nv.info.Subsample_Bilinear_p010le_p010le.nv.shared.Subsample_Bilinear_p010le_p010le.nv.constant2.Subsample_Bilinear_p010le_p010le.nv.constant0.Subsample_Bilinear_p010le_p010le.rel.nv.constant0.Subsample_Bilinear_p010le_p010le.text.Subsample_Bilinear_yuv444p_p010le_uv.nv.info.Subsample_Bilinear_yuv444p_p010le_uv.nv.shared.Subsample_Bilinear_yuv444p_p010le_uv.nv.constant2.Subsample_Bilinear_yuv444p_p010le_uv.nv.constant0.Subsample_Bilinear_yuv444p_p010le_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p_p010le_uv.text.Subsample_Bilinear_yuv444p_p010le.nv.info.Subsample_Bilinear_yuv444p_p010le.nv.shared.Subsample_Bilinear_yuv444p_p010le.nv.constant2.Subsample_Bilinear_yuv444p_p010le.nv.constant0.Subsample_Bilinear_yuv444p_p010le.rel.nv.constant0.Subsample_Bilinear_yuv444p_p010le.text.Subsample_Bilinear_nv12_p010le_uv.nv.info.Subsample_Bilinear_nv12_p010le_uv.nv.shared.Subsample_Bilinear_nv12_p010le_uv.nv.constant2.Subsample_Bilinear_nv12_p010le_uv.nv.constant0.Subsample_Bilinear_nv12_p010le_uv.rel.nv.constant0.Subsample_Bilinear_nv12_p010le_uv.text.Subsample_Bilinear_nv12_p010le.nv.info.Subsample_Bilinear_nv12_p010le.nv.shared.Subsample_Bilinear_nv12_p010le.nv.constant2.Subsample_Bilinear_nv12_p010le.nv.constant0.Subsample_Bilinear_nv12_p010le.rel.nv.constant0.Subsample_Bilinear_nv12_p010le.text.Subsample_Bilinear_yuv420p_p010le_uv.nv.info.Subsample_Bilinear_yuv420p_p010le_uv.nv.shared.Subsample_Bilinear_yuv420p_p010le_uv.nv.constant2.Subsample_Bilinear_yuv420p_p010le_uv.nv.constant0.Subsample_Bilinear_yuv420p_p010le_uv.rel.nv.constant0.Subsample_Bilinear_yuv420p_p010le_uv.text.Subsample_Bilinear_yuv420p_p010le.nv.info.Subsample_Bilinear_yuv420p_p010le.nv.shared.Subsample_Bilinear_yuv420p_p010le.nv.constant2.Subsample_Bilinear_yuv420p_p010le.nv.constant0.Subsample_Bilinear_yuv420p_p010le.rel.nv.constant0.Subsample_Bilinear_yuv420p_p010le.text.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.info.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.shared.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p_uv.text.Subsample_Bilinear_yuv444p16le_yuv444p.nv.info.Subsample_Bilinear_yuv444p16le_yuv444p.nv.shared.Subsample_Bilinear_yuv444p16le_yuv444p.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv444p.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p.text.Subsample_Bilinear_p016le_yuv444p_uv.nv.info.Subsample_Bilinear_p016le_yuv444p_uv.nv.shared.Subsample_Bilinear_p016le_yuv444p_uv.nv.constant2.Subsample_Bilinear_p016le_yuv444p_uv.nv.constant0.Subsample_Bilinear_p016le_yuv444p_uv.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p_uv.text.Subsample_Bilinear_p016le_yuv444p.nv.info.Subsample_Bilinear_p016le_yuv444p.nv.shared.Subsample_Bilinear_p016le_yuv444p.nv.constant2.Subsample_Bilinear_p016le_yuv444p.nv.constant0.Subsample_Bilinear_p016le_yuv444p.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p.text.Subsample_Bilinear_p010le_yuv444p_uv
.nv.info.Subsample_Bilinear_p010le_yuv444p_uv.nv.shared.Subsample_Bilinear_p010le_yuv444p_uv.nv.constant2.Subsample_Bilinear_p010le_yuv444p_uv.nv.constant0.Subsample_Bilinear_p010le_yuv444p_uv.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p_uv.text.Subsample_Bilinear_p010le_yuv444p.nv.info.Subsample_Bilinear_p010le_yuv444p.nv.shared.Subsample_Bilinear_p010le_yuv444p.nv.constant2.Subsample_Bilinear_p010le_yuv444p.nv.constant0.Subsample_Bilinear_p010le_yuv444p.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p.text.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.info.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.shared.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p_uv.text.Subsample_Bilinear_yuv444p_yuv444p.nv.info.Subsample_Bilinear_yuv444p_yuv444p.nv.shared.Subsample_Bilinear_yuv444p_yuv444p.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p.text.Subsample_Bilinear_nv12_yuv444p_uv.nv.info.Subsample_Bilinear_nv12_yuv444p_uv.nv.shared.Subsample_Bilinear_nv12_yuv444p_uv.nv.constant2.Subsample_Bilinear_nv12_yuv444p_uv.nv.constant0.Subsample_Bilinear_nv12_yuv444p_uv.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p_uv.text.Subsample_Bilinear_nv12_yuv444p.nv.info.Subsample_Bilinear_nv12_yuv444p.nv.shared.Subsample_Bilinear_nv12_yuv444p.nv.constant2.Subsample_Bilinear_nv12_yuv444p.nv.constant0.Subsample_Bilinear_nv12_yuv444p.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p.text.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.info.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.shared.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p_uv.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p_uv.text.Subsample_Bilinear_yuv420p_yuv444p.nv.info.Subsample_Bilinear_yuv420p_yuv444p.nv.shared.Subsample_Bilinear_yuv420p_yuv444p.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p.text.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.info.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.shared.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12_uv.text.Subsample_Bilinear_yuv444p16le_nv12.nv.info.Subsample_Bilinear_yuv444p16le_nv12.nv.shared.Subsample_Bilinear_yuv444p16le_nv12.nv.constant2.Subsample_Bilinear_yuv444p16le_nv12.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12.text.Subsample_Bilinear_p016le_nv12_uv.nv.info.Subsample_Bilinear_p016le_nv12_uv.nv.shared.Subsample_Bilinear_p016le_nv12_uv.nv.constant2.Subsample_Bilinear_p016le_nv12_uv.nv.constant0.Subsample_Bilinear_p016le_nv12_uv.rel.nv.constant0.Subsample_Bilinear_p016le_nv12_uv.text.Subsample_Bilinear_p016le_nv12.nv.info.Subsample_Bilinear_p016le_nv12.nv.shared.Subsample_Bilinear_p016le_nv12.nv.constant2.Subsample_Bilinear_p016le_nv12.nv.constant0.Subsample_Bilinear_p016le_nv12.rel.nv.constant0.Subsample_Bilinear_p016le_nv12.text.Subsample_Bilinear_p010le_nv12_uv.nv.info.Subsample_Bilinear_p010le_nv12_uv.nv.shared.Subsample_Bilinear_p010le_nv12_uv.nv.constant2.Subsample_Bilinear_p010le_nv12_uv.nv.constant0.Subsample_Bilinear_p010le_nv12_uv.rel.nv.constant0.Subsample_Bilinear_p010l
e_nv12_uv.text.Subsample_Bilinear_p010le_nv12.nv.info.Subsample_Bilinear_p010le_nv12.nv.shared.Subsample_Bilinear_p010le_nv12.nv.constant2.Subsample_Bilinear_p010le_nv12.nv.constant0.Subsample_Bilinear_p010le_nv12.rel.nv.constant0.Subsample_Bilinear_p010le_nv12.text.Subsample_Bilinear_yuv444p_nv12_uv.nv.info.Subsample_Bilinear_yuv444p_nv12_uv.nv.shared.Subsample_Bilinear_yuv444p_nv12_uv.nv.constant2.Subsample_Bilinear_yuv444p_nv12_uv.nv.constant0.Subsample_Bilinear_yuv444p_nv12_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p_nv12_uv.text.Subsample_Bilinear_yuv444p_nv12.nv.info.Subsample_Bilinear_yuv444p_nv12.nv.shared.Subsample_Bilinear_yuv444p_nv12.nv.constant2.Subsample_Bilinear_yuv444p_nv12.nv.constant0.Subsample_Bilinear_yuv444p_nv12.rel.nv.constant0.Subsample_Bilinear_yuv444p_nv12.text.Subsample_Bilinear_nv12_nv12_uv.nv.info.Subsample_Bilinear_nv12_nv12_uv.nv.shared.Subsample_Bilinear_nv12_nv12_uv.nv.constant2.Subsample_Bilinear_nv12_nv12_uv.nv.constant0.Subsample_Bilinear_nv12_nv12_uv.rel.nv.constant0.Subsample_Bilinear_nv12_nv12_uv.text.Subsample_Bilinear_nv12_nv12.nv.info.Subsample_Bilinear_nv12_nv12.nv.shared.Subsample_Bilinear_nv12_nv12.nv.constant2.Subsample_Bilinear_nv12_nv12.nv.constant0.Subsample_Bilinear_nv12_nv12.rel.nv.constant0.Subsample_Bilinear_nv12_nv12.text.Subsample_Bilinear_yuv420p_nv12_uv.nv.info.Subsample_Bilinear_yuv420p_nv12_uv.nv.shared.Subsample_Bilinear_yuv420p_nv12_uv.nv.constant2.Subsample_Bilinear_yuv420p_nv12_uv.nv.constant0.Subsample_Bilinear_yuv420p_nv12_uv.rel.nv.constant0.Subsample_Bilinear_yuv420p_nv12_uv.text.Subsample_Bilinear_yuv420p_nv12.nv.info.Subsample_Bilinear_yuv420p_nv12.nv.shared.Subsample_Bilinear_yuv420p_nv12.nv.constant2.Subsample_Bilinear_yuv420p_nv12.nv.constant0.Subsample_Bilinear_yuv420p_nv12.rel.nv.constant0.Subsample_Bilinear_yuv420p_nv12.text.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.info.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.shared.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p_uv.text.Subsample_Bilinear_yuv444p16le_yuv420p.nv.info.Subsample_Bilinear_yuv444p16le_yuv420p.nv.shared.Subsample_Bilinear_yuv444p16le_yuv420p.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv420p.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p.text.Subsample_Bilinear_p016le_yuv420p_uv.nv.info.Subsample_Bilinear_p016le_yuv420p_uv.nv.shared.Subsample_Bilinear_p016le_yuv420p_uv.nv.constant2.Subsample_Bilinear_p016le_yuv420p_uv.nv.constant0.Subsample_Bilinear_p016le_yuv420p_uv.rel.nv.constant0.Subsample_Bilinear_p016le_yuv420p_uv.text.Subsample_Bilinear_p016le_yuv420p.nv.info.Subsample_Bilinear_p016le_yuv420p.nv.shared.Subsample_Bilinear_p016le_yuv420p.nv.constant2.Subsample_Bilinear_p016le_yuv420p.nv.constant0.Subsample_Bilinear_p016le_yuv420p.rel.nv.constant0.Subsample_Bilinear_p016le_yuv420p.text.Subsample_Bilinear_p010le_yuv420p_uv.nv.info.Subsample_Bilinear_p010le_yuv420p_uv.nv.shared.Subsample_Bilinear_p010le_yuv420p_uv.nv.constant2.Subsample_Bilinear_p010le_yuv420p_uv.nv.constant0.Subsample_Bilinear_p010le_yuv420p_uv.rel.nv.constant0.Subsample_Bilinear_p010le_yuv420p_uv.text.Subsample_Bilinear_p010le_yuv420p.nv.info.Subsample_Bilinear_p010le_yuv420p.nv.shared.Subsample_Bilinear_p010le_yuv420p.nv.constant2.Subsample_Bilinear_p010le_yuv420p.nv.constant0.Subsample_Bilinear_p010le_yuv420p.rel.nv.constant0.Subsample_Bilinear_p010le_y
uv420p.text.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.info.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.shared.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.constant2.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p_uv.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p_uv.text.Subsample_Bilinear_yuv444p_yuv420p.nv.info.Subsample_Bilinear_yuv444p_yuv420p.nv.shared.Subsample_Bilinear_yuv444p_yuv420p.nv.constant2.Subsample_Bilinear_yuv444p_yuv420p.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p.text.Subsample_Bilinear_nv12_yuv420p_uv.nv.info.Subsample_Bilinear_nv12_yuv420p_uv.nv.shared.Subsample_Bilinear_nv12_yuv420p_uv.nv.constant2.Subsample_Bilinear_nv12_yuv420p_uv.nv.constant0.Subsample_Bilinear_nv12_yuv420p_uv.rel.nv.constant0.Subsample_Bilinear_nv12_yuv420p_uv.text.Subsample_Bilinear_nv12_yuv420p.nv.info.Subsample_Bilinear_nv12_yuv420p.nv.shared.Subsample_Bilinear_nv12_yuv420p.nv.constant2.Subsample_Bilinear_nv12_yuv420p.nv.constant0.Subsample_Bilinear_nv12_yuv420p.rel.nv.constant0.Subsample_Bilinear_nv12_yuv420p.text.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.info.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.shared.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.constant2.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p_uv.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p_uv.text.Subsample_Bilinear_yuv420p_yuv420p.nv.info.Subsample_Bilinear_yuv420p_yuv420p.nv.shared.Subsample_Bilinear_yuv420p_yuv420p.nv.constant2.Subsample_Bilinear_yuv420p_yuv420p.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p.text.Subsample_Nearest_rgb0_bgr0_uv.nv.info.Subsample_Nearest_rgb0_bgr0_uv.nv.shared.Subsample_Nearest_rgb0_bgr0_uv.nv.constant0.Subsample_Nearest_rgb0_bgr0_uv.rel.nv.constant0.Subsample_Nearest_rgb0_bgr0_uv.text.Subsample_Nearest_rgb0_bgr0.nv.info.Subsample_Nearest_rgb0_bgr0.nv.shared.Subsample_Nearest_rgb0_bgr0.nv.constant2.Subsample_Nearest_rgb0_bgr0.nv.constant0.Subsample_Nearest_rgb0_bgr0.rel.nv.constant0.Subsample_Nearest_rgb0_bgr0.text.Subsample_Nearest_bgr0_rgb0_uv.nv.info.Subsample_Nearest_bgr0_rgb0_uv.nv.shared.Subsample_Nearest_bgr0_rgb0_uv.nv.constant0.Subsample_Nearest_bgr0_rgb0_uv.rel.nv.constant0.Subsample_Nearest_bgr0_rgb0_uv.text.Subsample_Nearest_bgr0_rgb0.nv.info.Subsample_Nearest_bgr0_rgb0.nv.shared.Subsample_Nearest_bgr0_rgb0.nv.constant2.Subsample_Nearest_bgr0_rgb0.nv.constant0.Subsample_Nearest_bgr0_rgb0.rel.nv.constant0.Subsample_Nearest_bgr0_rgb0.text.Subsample_Nearest_rgb0_rgb0_uv.nv.info.Subsample_Nearest_rgb0_rgb0_uv.nv.shared.Subsample_Nearest_rgb0_rgb0_uv.nv.constant0.Subsample_Nearest_rgb0_rgb0_uv.rel.nv.constant0.Subsample_Nearest_rgb0_rgb0_uv.text.Subsample_Nearest_rgb0_rgb0.nv.info.Subsample_Nearest_rgb0_rgb0.nv.shared.Subsample_Nearest_rgb0_rgb0.nv.constant2.Subsample_Nearest_rgb0_rgb0.nv.constant0.Subsample_Nearest_rgb0_rgb0.rel.nv.constant0.Subsample_Nearest_rgb0_rgb0.text.Subsample_Nearest_bgr0_bgr0_uv.nv.info.Subsample_Nearest_bgr0_bgr0_uv.nv.shared.Subsample_Nearest_bgr0_bgr0_uv.nv.constant0.Subsample_Nearest_bgr0_bgr0_uv.rel.nv.constant0.Subsample_Nearest_bgr0_bgr0_uv.text.Subsample_Nearest_bgr0_bgr0.nv.info.Subsample_Nearest_bgr0_bgr0.nv.shared.Subsample_Nearest_bgr0_bgr0.nv.constant2.Subsample_Nearest_bgr0_bgr0.nv.constant0.Subsample_Nearest_bgr0_bgr0.rel.nv.constant0.Subsample_Nearest_bgr0_bgr0.text.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.info.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.
shared.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.text.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.info.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le.text.Subsample_Nearest_p016le_yuv444p16le_uv.nv.info.Subsample_Nearest_p016le_yuv444p16le_uv.nv.shared.Subsample_Nearest_p016le_yuv444p16le_uv.nv.constant2.Subsample_Nearest_p016le_yuv444p16le_uv.nv.constant0.Subsample_Nearest_p016le_yuv444p16le_uv.rel.nv.constant0.Subsample_Nearest_p016le_yuv444p16le_uv.text.Subsample_Nearest_p016le_yuv444p16le.nv.info.Subsample_Nearest_p016le_yuv444p16le.nv.shared.Subsample_Nearest_p016le_yuv444p16le.nv.constant2.Subsample_Nearest_p016le_yuv444p16le.nv.constant0.Subsample_Nearest_p016le_yuv444p16le.rel.nv.constant0.Subsample_Nearest_p016le_yuv444p16le.text.Subsample_Nearest_p010le_yuv444p16le_uv.nv.info.Subsample_Nearest_p010le_yuv444p16le_uv.nv.shared.Subsample_Nearest_p010le_yuv444p16le_uv.nv.constant2.Subsample_Nearest_p010le_yuv444p16le_uv.nv.constant0.Subsample_Nearest_p010le_yuv444p16le_uv.rel.nv.constant0.Subsample_Nearest_p010le_yuv444p16le_uv.text.Subsample_Nearest_p010le_yuv444p16le.nv.info.Subsample_Nearest_p010le_yuv444p16le.nv.shared.Subsample_Nearest_p010le_yuv444p16le.nv.constant2.Subsample_Nearest_p010le_yuv444p16le.nv.constant0.Subsample_Nearest_p010le_yuv444p16le.rel.nv.constant0.Subsample_Nearest_p010le_yuv444p16le.text.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.info.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.shared.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.constant2.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le_uv.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le_uv.text.Subsample_Nearest_yuv444p_yuv444p16le.nv.info.Subsample_Nearest_yuv444p_yuv444p16le.nv.shared.Subsample_Nearest_yuv444p_yuv444p16le.nv.constant2.Subsample_Nearest_yuv444p_yuv444p16le.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le.text.Subsample_Nearest_nv12_yuv444p16le_uv.nv.info.Subsample_Nearest_nv12_yuv444p16le_uv.nv.shared.Subsample_Nearest_nv12_yuv444p16le_uv.nv.constant2.Subsample_Nearest_nv12_yuv444p16le_uv.nv.constant0.Subsample_Nearest_nv12_yuv444p16le_uv.rel.nv.constant0.Subsample_Nearest_nv12_yuv444p16le_uv.text.Subsample_Nearest_nv12_yuv444p16le.nv.info.Subsample_Nearest_nv12_yuv444p16le.nv.shared.Subsample_Nearest_nv12_yuv444p16le.nv.constant2.Subsample_Nearest_nv12_yuv444p16le.nv.constant0.Subsample_Nearest_nv12_yuv444p16le.rel.nv.constant0.Subsample_Nearest_nv12_yuv444p16le.text.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.info.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.shared.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.constant2.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le_uv.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le_uv.text.Subsample_Nearest_yuv420p_yuv444p16le.nv.info.Subsample_Nearest_yuv420p_yuv444p16le.nv.shared.Subsample_Nearest_yuv420p_yuv444p16le.nv.constant2.Subsample_Nearest_yuv420p_yuv444p16le.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le.text.Subsample_Nearest_yuv444p16le_p0
16le_uv.nv.info.Subsample_Nearest_yuv444p16le_p016le_uv.nv.shared.Subsample_Nearest_yuv444p16le_p016le_uv.nv.constant2.Subsample_Nearest_yuv444p16le_p016le_uv.nv.constant0.Subsample_Nearest_yuv444p16le_p016le_uv.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p016le_uv.text.Subsample_Nearest_yuv444p16le_p016le.nv.info.Subsample_Nearest_yuv444p16le_p016le.nv.shared.Subsample_Nearest_yuv444p16le_p016le.nv.constant2.Subsample_Nearest_yuv444p16le_p016le.nv.constant0.Subsample_Nearest_yuv444p16le_p016le.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p016le.text.Subsample_Nearest_p016le_p016le_uv.nv.info.Subsample_Nearest_p016le_p016le_uv.nv.shared.Subsample_Nearest_p016le_p016le_uv.nv.constant2.Subsample_Nearest_p016le_p016le_uv.nv.constant0.Subsample_Nearest_p016le_p016le_uv.rel.nv.constant0.Subsample_Nearest_p016le_p016le_uv.text.Subsample_Nearest_p016le_p016le.nv.info.Subsample_Nearest_p016le_p016le.nv.shared.Subsample_Nearest_p016le_p016le.nv.constant2.Subsample_Nearest_p016le_p016le.nv.constant0.Subsample_Nearest_p016le_p016le.rel.nv.constant0.Subsample_Nearest_p016le_p016le.text.Subsample_Nearest_p010le_p016le_uv.nv.info.Subsample_Nearest_p010le_p016le_uv.nv.shared.Subsample_Nearest_p010le_p016le_uv.nv.constant2.Subsample_Nearest_p010le_p016le_uv.nv.constant0.Subsample_Nearest_p010le_p016le_uv.rel.nv.constant0.Subsample_Nearest_p010le_p016le_uv.text.Subsample_Nearest_p010le_p016le.nv.info.Subsample_Nearest_p010le_p016le.nv.shared.Subsample_Nearest_p010le_p016le.nv.constant2.Subsample_Nearest_p010le_p016le.nv.constant0.Subsample_Nearest_p010le_p016le.rel.nv.constant0.Subsample_Nearest_p010le_p016le.text.Subsample_Nearest_yuv444p_p016le_uv.nv.info.Subsample_Nearest_yuv444p_p016le_uv.nv.shared.Subsample_Nearest_yuv444p_p016le_uv.nv.constant2.Subsample_Nearest_yuv444p_p016le_uv.nv.constant0.Subsample_Nearest_yuv444p_p016le_uv.rel.nv.constant0.Subsample_Nearest_yuv444p_p016le_uv.text.Subsample_Nearest_yuv444p_p016le.nv.info.Subsample_Nearest_yuv444p_p016le.nv.shared.Subsample_Nearest_yuv444p_p016le.nv.constant2.Subsample_Nearest_yuv444p_p016le.nv.constant0.Subsample_Nearest_yuv444p_p016le.rel.nv.constant0.Subsample_Nearest_yuv444p_p016le.text.Subsample_Nearest_nv12_p016le_uv.nv.info.Subsample_Nearest_nv12_p016le_uv.nv.shared.Subsample_Nearest_nv12_p016le_uv.nv.constant2.Subsample_Nearest_nv12_p016le_uv.nv.constant0.Subsample_Nearest_nv12_p016le_uv.rel.nv.constant0.Subsample_Nearest_nv12_p016le_uv.text.Subsample_Nearest_nv12_p016le.nv.info.Subsample_Nearest_nv12_p016le.nv.shared.Subsample_Nearest_nv12_p016le.nv.constant2.Subsample_Nearest_nv12_p016le.nv.constant0.Subsample_Nearest_nv12_p016le.rel.nv.constant0.Subsample_Nearest_nv12_p016le.text.Subsample_Nearest_yuv420p_p016le_uv.nv.info.Subsample_Nearest_yuv420p_p016le_uv.nv.shared.Subsample_Nearest_yuv420p_p016le_uv.nv.constant2.Subsample_Nearest_yuv420p_p016le_uv.nv.constant0.Subsample_Nearest_yuv420p_p016le_uv.rel.nv.constant0.Subsample_Nearest_yuv420p_p016le_uv.text.Subsample_Nearest_yuv420p_p016le.nv.info.Subsample_Nearest_yuv420p_p016le.nv.shared.Subsample_Nearest_yuv420p_p016le.nv.constant2.Subsample_Nearest_yuv420p_p016le.nv.constant0.Subsample_Nearest_yuv420p_p016le.rel.nv.constant0.Subsample_Nearest_yuv420p_p016le.text.Subsample_Nearest_yuv444p16le_p010le_uv.nv.info.Subsample_Nearest_yuv444p16le_p010le_uv.nv.shared.Subsample_Nearest_yuv444p16le_p010le_uv.nv.constant2.Subsample_Nearest_yuv444p16le_p010le_uv.nv.constant0.Subsample_Nearest_yuv444p16le_p010le_uv.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p010le_uv.text.Subsample_Nearest
_yuv444p16le_p010le.nv.info.Subsample_Nearest_yuv444p16le_p010le.nv.shared.Subsample_Nearest_yuv444p16le_p010le.nv.constant2.Subsample_Nearest_yuv444p16le_p010le.nv.constant0.Subsample_Nearest_yuv444p16le_p010le.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p010le.text.Subsample_Nearest_p016le_p010le_uv.nv.info.Subsample_Nearest_p016le_p010le_uv.nv.shared.Subsample_Nearest_p016le_p010le_uv.nv.constant2.Subsample_Nearest_p016le_p010le_uv.nv.constant0.Subsample_Nearest_p016le_p010le_uv.rel.nv.constant0.Subsample_Nearest_p016le_p010le_uv.text.Subsample_Nearest_p016le_p010le.nv.info.Subsample_Nearest_p016le_p010le.nv.shared.Subsample_Nearest_p016le_p010le.nv.constant2.Subsample_Nearest_p016le_p010le.nv.constant0.Subsample_Nearest_p016le_p010le.rel.nv.constant0.Subsample_Nearest_p016le_p010le.text.Subsample_Nearest_p010le_p010le_uv.nv.info.Subsample_Nearest_p010le_p010le_uv.nv.shared.Subsample_Nearest_p010le_p010le_uv.nv.constant2.Subsample_Nearest_p010le_p010le_uv.nv.constant0.Subsample_Nearest_p010le_p010le_uv.rel.nv.constant0.Subsample_Nearest_p010le_p010le_uv.text.Subsample_Nearest_p010le_p010le.nv.info.Subsample_Nearest_p010le_p010le.nv.shared.Subsample_Nearest_p010le_p010le.nv.constant2.Subsample_Nearest_p010le_p010le.nv.constant0.Subsample_Nearest_p010le_p010le.rel.nv.constant0.Subsample_Nearest_p010le_p010le.text.Subsample_Nearest_yuv444p_p010le_uv.nv.info.Subsample_Nearest_yuv444p_p010le_uv.nv.shared.Subsample_Nearest_yuv444p_p010le_uv.nv.constant2.Subsample_Nearest_yuv444p_p010le_uv.nv.constant0.Subsample_Nearest_yuv444p_p010le_uv.rel.nv.constant0.Subsample_Nearest_yuv444p_p010le_uv.text.Subsample_Nearest_yuv444p_p010le.nv.info.Subsample_Nearest_yuv444p_p010le.nv.shared.Subsample_Nearest_yuv444p_p010le.nv.constant2.Subsample_Nearest_yuv444p_p010le.nv.constant0.Subsample_Nearest_yuv444p_p010le.rel.nv.constant0.Subsample_Nearest_yuv444p_p010le.text.Subsample_Nearest_nv12_p010le_uv.nv.info.Subsample_Nearest_nv12_p010le_uv.nv.shared.Subsample_Nearest_nv12_p010le_uv.nv.constant2.Subsample_Nearest_nv12_p010le_uv.nv.constant0.Subsample_Nearest_nv12_p010le_uv.rel.nv.constant0.Subsample_Nearest_nv12_p010le_uv.text.Subsample_Nearest_nv12_p010le.nv.info.Subsample_Nearest_nv12_p010le.nv.shared.Subsample_Nearest_nv12_p010le.nv.constant2.Subsample_Nearest_nv12_p010le.nv.constant0.Subsample_Nearest_nv12_p010le.rel.nv.constant0.Subsample_Nearest_nv12_p010le.text.Subsample_Nearest_yuv420p_p010le_uv.nv.info.Subsample_Nearest_yuv420p_p010le_uv.nv.shared.Subsample_Nearest_yuv420p_p010le_uv.nv.constant2.Subsample_Nearest_yuv420p_p010le_uv.nv.constant0.Subsample_Nearest_yuv420p_p010le_uv.rel.nv.constant0.Subsample_Nearest_yuv420p_p010le_uv.text.Subsample_Nearest_yuv420p_p010le.nv.info.Subsample_Nearest_yuv420p_p010le.nv.shared.Subsample_Nearest_yuv420p_p010le.nv.constant2.Subsample_Nearest_yuv420p_p010le.nv.constant0.Subsample_Nearest_yuv420p_p010le.rel.nv.constant0.Subsample_Nearest_yuv420p_p010le.text.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.info.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p_uv.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p_uv.text.Subsample_Nearest_yuv444p16le_yuv444p.nv.info.Subsample_Nearest_yuv444p16le_yuv444p.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p.text.Subsampl
[ELF string table (.shstrtab/.strtab) of the cached CUDA module. It lists the per-kernel section names (.text, .nv.info, .nv.shared, .nv.constant0, .nv.constant2, .rel.nv.constant0) together with module-level entries (.nv.callgraph, .nv.prototype, .nv.rel.action, .shstrtab, .strtab, .symtab, .symtab_shndx) for the scaling kernels Subsample_Nearest_*, Subsample_Lanczos_*, Subsample_Bicubic_* and Subsample_Bilinear_*. Each kernel name encodes a source/destination pixel-format pair drawn from yuv420p, nv12, yuv444p, p010le, p016le, yuv444p16le, rgb0 and bgr0, with a *_uv variant for the chroma plane(s). For the Lanczos, Bicubic and Bilinear kernels the table also carries cloned helper symbols of the form $__internal_N_$__cuda_sm3x_div_rn_noftz_f32_slowpath (numbered __internal_0_ through __internal_157_ in the portion shown) and, for some kernels, the templated device function _ZL17Subsample_BicubicI6uchar4XadL_ZL14lanczos_coeffsffEEET_yiiiiiiif. The string-table listing continues past this point.]
linear_p016le_yuv444p16le_uv.nv.info.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.shared.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_p016le_yuv444p16le_uv$__internal_158_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le_uv.text.Subsample_Bilinear_p016le_yuv444p16le.nv.info.Subsample_Bilinear_p016le_yuv444p16le.nv.shared.Subsample_Bilinear_p016le_yuv444p16le.nv.constant2.Subsample_Bilinear_p016le_yuv444p16le$__internal_159_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le.nv.constant0.Subsample_Bilinear_p016le_yuv444p16le.text.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.info.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.shared.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_p010le_yuv444p16le_uv$__internal_160_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le_uv.text.Subsample_Bilinear_p010le_yuv444p16le.nv.info.Subsample_Bilinear_p010le_yuv444p16le.nv.shared.Subsample_Bilinear_p010le_yuv444p16le.nv.constant2.Subsample_Bilinear_p010le_yuv444p16le$__internal_161_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le.nv.constant0.Subsample_Bilinear_p010le_yuv444p16le.text.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.info.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.shared.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p16le_uv$__internal_162_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le_uv.text.Subsample_Bilinear_yuv444p_yuv444p16le.nv.info.Subsample_Bilinear_yuv444p_yuv444p16le.nv.shared.Subsample_Bilinear_yuv444p_yuv444p16le.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p16le$__internal_163_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p16le.text.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.info.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.shared.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_nv12_yuv444p16le_uv$__internal_164_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le_uv.text.Subsample_Bilinear_nv12_yuv444p16le.nv.info.Subsample_Bilinear_nv12_yuv444p16le.nv.shared.Subsample_Bilinear_nv12_yuv444p16le.nv.constant2.Subsample_Bilinear_nv12_yuv444p16le$__internal_165_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le.nv.constant0.Subsample_Bilinear_nv12_yuv444p16le.text.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.info.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.shared.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p16le_uv$__internal_166_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le_uv.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le_uv.text.Subsample_Bilinear_yuv420p_yuv444p16le.nv.info.Subsample_Bilinear_yuv420p_yuv444p16le.nv.shared.Subsample_Bilinear_yuv420p_yuv444p16le.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p16le$__internal_167_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p16le.te
xt.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.info.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.shared.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_p016le_uv$__internal_168_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le_uv.text.Subsample_Bilinear_yuv444p16le_p016le.nv.info.Subsample_Bilinear_yuv444p16le_p016le.nv.shared.Subsample_Bilinear_yuv444p16le_p016le.nv.constant2.Subsample_Bilinear_yuv444p16le_p016le$__internal_169_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le.nv.constant0.Subsample_Bilinear_yuv444p16le_p016le.text.Subsample_Bilinear_p016le_p016le_uv.nv.info.Subsample_Bilinear_p016le_p016le_uv.nv.shared.Subsample_Bilinear_p016le_p016le_uv.nv.constant2.Subsample_Bilinear_p016le_p016le_uv$__internal_170_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_p016le_uv.nv.constant0.Subsample_Bilinear_p016le_p016le_uv.text.Subsample_Bilinear_p016le_p016le.nv.info.Subsample_Bilinear_p016le_p016le.nv.shared.Subsample_Bilinear_p016le_p016le.nv.constant2.Subsample_Bilinear_p016le_p016le$__internal_171_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_p016le.nv.constant0.Subsample_Bilinear_p016le_p016le.text.Subsample_Bilinear_p010le_p016le_uv.nv.info.Subsample_Bilinear_p010le_p016le_uv.nv.shared.Subsample_Bilinear_p010le_p016le_uv.nv.constant2.Subsample_Bilinear_p010le_p016le_uv$__internal_172_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_p016le_uv.nv.constant0.Subsample_Bilinear_p010le_p016le_uv.text.Subsample_Bilinear_p010le_p016le.nv.info.Subsample_Bilinear_p010le_p016le.nv.shared.Subsample_Bilinear_p010le_p016le.nv.constant2.Subsample_Bilinear_p010le_p016le$__internal_173_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_p016le.nv.constant0.Subsample_Bilinear_p010le_p016le.text.Subsample_Bilinear_yuv444p_p016le_uv.nv.info.Subsample_Bilinear_yuv444p_p016le_uv.nv.shared.Subsample_Bilinear_yuv444p_p016le_uv.nv.constant2.Subsample_Bilinear_yuv444p_p016le_uv$__internal_174_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_p016le_uv.nv.constant0.Subsample_Bilinear_yuv444p_p016le_uv.text.Subsample_Bilinear_yuv444p_p016le.nv.info.Subsample_Bilinear_yuv444p_p016le.nv.shared.Subsample_Bilinear_yuv444p_p016le.nv.constant2.Subsample_Bilinear_yuv444p_p016le$__internal_175_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_p016le.nv.constant0.Subsample_Bilinear_yuv444p_p016le.text.Subsample_Bilinear_nv12_p016le_uv.nv.info.Subsample_Bilinear_nv12_p016le_uv.nv.shared.Subsample_Bilinear_nv12_p016le_uv.nv.constant2.Subsample_Bilinear_nv12_p016le_uv$__internal_176_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_p016le_uv.nv.constant0.Subsample_Bilinear_nv12_p016le_uv.text.Subsample_Bilinear_nv12_p016le.nv.info.Subsample_Bilinear_nv12_p016le.nv.shared.Subsample_Bilinear_nv12_p016le.nv.constant2.Subsample_Bilinear_nv12_p016le$__internal_177_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_p016le.nv.constant0.Subsample_Bilinear_nv12_p016le.text.Subsample_Bilinear_yuv420p_p016le_uv.nv.info.Subsample_Bilinear_yuv420p_p016le_uv.nv.shared.Subsample_Bilinear_yuv420p_p016le_uv.nv.constant2.Subsample_Bilinear_yuv420p_p016le_uv$__internal_178_$__cuda_sm3x_div_rn_noftz_f32_slowpath.r
el.nv.constant0.Subsample_Bilinear_yuv420p_p016le_uv.nv.constant0.Subsample_Bilinear_yuv420p_p016le_uv.text.Subsample_Bilinear_yuv420p_p016le.nv.info.Subsample_Bilinear_yuv420p_p016le.nv.shared.Subsample_Bilinear_yuv420p_p016le.nv.constant2.Subsample_Bilinear_yuv420p_p016le$__internal_179_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_p016le.nv.constant0.Subsample_Bilinear_yuv420p_p016le.text.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.info.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.shared.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_p010le_uv$__internal_180_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le_uv.text.Subsample_Bilinear_yuv444p16le_p010le.nv.info.Subsample_Bilinear_yuv444p16le_p010le.nv.shared.Subsample_Bilinear_yuv444p16le_p010le.nv.constant2.Subsample_Bilinear_yuv444p16le_p010le$__internal_181_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le.nv.constant0.Subsample_Bilinear_yuv444p16le_p010le.text.Subsample_Bilinear_p016le_p010le_uv.nv.info.Subsample_Bilinear_p016le_p010le_uv.nv.shared.Subsample_Bilinear_p016le_p010le_uv.nv.constant2.Subsample_Bilinear_p016le_p010le_uv$__internal_182_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_p010le_uv.nv.constant0.Subsample_Bilinear_p016le_p010le_uv.text.Subsample_Bilinear_p016le_p010le.nv.info.Subsample_Bilinear_p016le_p010le.nv.shared.Subsample_Bilinear_p016le_p010le.nv.constant2.Subsample_Bilinear_p016le_p010le$__internal_183_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_p010le.nv.constant0.Subsample_Bilinear_p016le_p010le.text.Subsample_Bilinear_p010le_p010le_uv.nv.info.Subsample_Bilinear_p010le_p010le_uv.nv.shared.Subsample_Bilinear_p010le_p010le_uv.nv.constant2.Subsample_Bilinear_p010le_p010le_uv$__internal_184_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_p010le_uv.nv.constant0.Subsample_Bilinear_p010le_p010le_uv.text.Subsample_Bilinear_p010le_p010le.nv.info.Subsample_Bilinear_p010le_p010le.nv.shared.Subsample_Bilinear_p010le_p010le.nv.constant2.Subsample_Bilinear_p010le_p010le$__internal_185_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_p010le.nv.constant0.Subsample_Bilinear_p010le_p010le.text.Subsample_Bilinear_yuv444p_p010le_uv.nv.info.Subsample_Bilinear_yuv444p_p010le_uv.nv.shared.Subsample_Bilinear_yuv444p_p010le_uv.nv.constant2.Subsample_Bilinear_yuv444p_p010le_uv$__internal_186_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_p010le_uv.nv.constant0.Subsample_Bilinear_yuv444p_p010le_uv.text.Subsample_Bilinear_yuv444p_p010le.nv.info.Subsample_Bilinear_yuv444p_p010le.nv.shared.Subsample_Bilinear_yuv444p_p010le.nv.constant2.Subsample_Bilinear_yuv444p_p010le$__internal_187_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_p010le.nv.constant0.Subsample_Bilinear_yuv444p_p010le.text.Subsample_Bilinear_nv12_p010le_uv.nv.info.Subsample_Bilinear_nv12_p010le_uv.nv.shared.Subsample_Bilinear_nv12_p010le_uv.nv.constant2.Subsample_Bilinear_nv12_p010le_uv$__internal_188_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_p010le_uv.nv.constant0.Subsample_Bilinear_nv12_p010le_uv.text.Subsample_Bilinear_nv12_p010le.nv.info.Subsample_Bilinear_nv12_p010le.nv.shared.Subsample_Bilinear_nv12_p010le.
nv.constant2.Subsample_Bilinear_nv12_p010le$__internal_189_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_p010le.nv.constant0.Subsample_Bilinear_nv12_p010le.text.Subsample_Bilinear_yuv420p_p010le_uv.nv.info.Subsample_Bilinear_yuv420p_p010le_uv.nv.shared.Subsample_Bilinear_yuv420p_p010le_uv.nv.constant2.Subsample_Bilinear_yuv420p_p010le_uv$__internal_190_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_p010le_uv.nv.constant0.Subsample_Bilinear_yuv420p_p010le_uv.text.Subsample_Bilinear_yuv420p_p010le.nv.info.Subsample_Bilinear_yuv420p_p010le.nv.shared.Subsample_Bilinear_yuv420p_p010le.nv.constant2.Subsample_Bilinear_yuv420p_p010le$__internal_191_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_p010le.nv.constant0.Subsample_Bilinear_yuv420p_p010le.text.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.info.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.shared.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv444p_uv$__internal_192_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p_uv.text.Subsample_Bilinear_yuv444p16le_yuv444p.nv.info.Subsample_Bilinear_yuv444p16le_yuv444p.nv.shared.Subsample_Bilinear_yuv444p16le_yuv444p.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv444p$__internal_193_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv444p.text.Subsample_Bilinear_p016le_yuv444p_uv.nv.info.Subsample_Bilinear_p016le_yuv444p_uv.nv.shared.Subsample_Bilinear_p016le_yuv444p_uv.nv.constant2.Subsample_Bilinear_p016le_yuv444p_uv$__internal_194_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p_uv.nv.constant0.Subsample_Bilinear_p016le_yuv444p_uv.text.Subsample_Bilinear_p016le_yuv444p.nv.info.Subsample_Bilinear_p016le_yuv444p.nv.shared.Subsample_Bilinear_p016le_yuv444p.nv.constant2.Subsample_Bilinear_p016le_yuv444p$__internal_195_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_yuv444p.nv.constant0.Subsample_Bilinear_p016le_yuv444p.text.Subsample_Bilinear_p010le_yuv444p_uv.nv.info.Subsample_Bilinear_p010le_yuv444p_uv.nv.shared.Subsample_Bilinear_p010le_yuv444p_uv.nv.constant2.Subsample_Bilinear_p010le_yuv444p_uv$__internal_196_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p_uv.nv.constant0.Subsample_Bilinear_p010le_yuv444p_uv.text.Subsample_Bilinear_p010le_yuv444p.nv.info.Subsample_Bilinear_p010le_yuv444p.nv.shared.Subsample_Bilinear_p010le_yuv444p.nv.constant2.Subsample_Bilinear_p010le_yuv444p$__internal_197_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_yuv444p.nv.constant0.Subsample_Bilinear_p010le_yuv444p.text.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.info.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.shared.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p_uv$__internal_198_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p_uv.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p_uv.text.Subsample_Bilinear_yuv444p_yuv444p.nv.info.Subsample_Bilinear_yuv444p_yuv444p.nv.shared.Subsample_Bilinear_yuv444p_yuv444p.nv.constant2.Subsample_Bilinear_yuv444p_yuv444p$__internal_199_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv444p.nv.constant
0.Subsample_Bilinear_yuv444p_yuv444p.text.Subsample_Bilinear_nv12_yuv444p_uv.nv.info.Subsample_Bilinear_nv12_yuv444p_uv.nv.shared.Subsample_Bilinear_nv12_yuv444p_uv.nv.constant2.Subsample_Bilinear_nv12_yuv444p_uv$__internal_200_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p_uv.nv.constant0.Subsample_Bilinear_nv12_yuv444p_uv.text.Subsample_Bilinear_nv12_yuv444p.nv.info.Subsample_Bilinear_nv12_yuv444p.nv.shared.Subsample_Bilinear_nv12_yuv444p.nv.constant2.Subsample_Bilinear_nv12_yuv444p$__internal_201_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_yuv444p.nv.constant0.Subsample_Bilinear_nv12_yuv444p.text.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.info.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.shared.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p_uv$__internal_202_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p_uv.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p_uv.text.Subsample_Bilinear_yuv420p_yuv444p.nv.info.Subsample_Bilinear_yuv420p_yuv444p.nv.shared.Subsample_Bilinear_yuv420p_yuv444p.nv.constant2.Subsample_Bilinear_yuv420p_yuv444p$__internal_203_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p.nv.constant0.Subsample_Bilinear_yuv420p_yuv444p.text.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.info.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.shared.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_nv12_uv$__internal_204_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12_uv.text.Subsample_Bilinear_yuv444p16le_nv12.nv.info.Subsample_Bilinear_yuv444p16le_nv12.nv.shared.Subsample_Bilinear_yuv444p16le_nv12.nv.constant2.Subsample_Bilinear_yuv444p16le_nv12$__internal_205_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12.nv.constant0.Subsample_Bilinear_yuv444p16le_nv12.text.Subsample_Bilinear_p016le_nv12_uv.nv.info.Subsample_Bilinear_p016le_nv12_uv.nv.shared.Subsample_Bilinear_p016le_nv12_uv.nv.constant2.Subsample_Bilinear_p016le_nv12_uv$__internal_206_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_nv12_uv.nv.constant0.Subsample_Bilinear_p016le_nv12_uv.text.Subsample_Bilinear_p016le_nv12.nv.info.Subsample_Bilinear_p016le_nv12.nv.shared.Subsample_Bilinear_p016le_nv12.nv.constant2.Subsample_Bilinear_p016le_nv12$__internal_207_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_nv12.nv.constant0.Subsample_Bilinear_p016le_nv12.text.Subsample_Bilinear_p010le_nv12_uv.nv.info.Subsample_Bilinear_p010le_nv12_uv.nv.shared.Subsample_Bilinear_p010le_nv12_uv.nv.constant2.Subsample_Bilinear_p010le_nv12_uv$__internal_208_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_nv12_uv.nv.constant0.Subsample_Bilinear_p010le_nv12_uv.text.Subsample_Bilinear_p010le_nv12.nv.info.Subsample_Bilinear_p010le_nv12.nv.shared.Subsample_Bilinear_p010le_nv12.nv.constant2.Subsample_Bilinear_p010le_nv12$__internal_209_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_nv12.nv.constant0.Subsample_Bilinear_p010le_nv12.text.Subsample_Bilinear_yuv444p_nv12_uv.nv.info.Subsample_Bilinear_yuv444p_nv12_uv.nv.shared.Subsample_Bilinear_yuv444p_nv12_uv.nv.constant2.Subsample_Bilinear_yuv444p_nv12_uv$__internal_210_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.S
ubsample_Bilinear_yuv444p_nv12_uv.nv.constant0.Subsample_Bilinear_yuv444p_nv12_uv.text.Subsample_Bilinear_yuv444p_nv12.nv.info.Subsample_Bilinear_yuv444p_nv12.nv.shared.Subsample_Bilinear_yuv444p_nv12.nv.constant2.Subsample_Bilinear_yuv444p_nv12$__internal_211_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_nv12.nv.constant0.Subsample_Bilinear_yuv444p_nv12.text.Subsample_Bilinear_nv12_nv12_uv.nv.info.Subsample_Bilinear_nv12_nv12_uv.nv.shared.Subsample_Bilinear_nv12_nv12_uv.nv.constant2.Subsample_Bilinear_nv12_nv12_uv$__internal_212_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_nv12_uv.nv.constant0.Subsample_Bilinear_nv12_nv12_uv.text.Subsample_Bilinear_nv12_nv12.nv.info.Subsample_Bilinear_nv12_nv12.nv.shared.Subsample_Bilinear_nv12_nv12.nv.constant2.Subsample_Bilinear_nv12_nv12$__internal_213_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_nv12.nv.constant0.Subsample_Bilinear_nv12_nv12.text.Subsample_Bilinear_yuv420p_nv12_uv.nv.info.Subsample_Bilinear_yuv420p_nv12_uv.nv.shared.Subsample_Bilinear_yuv420p_nv12_uv.nv.constant2.Subsample_Bilinear_yuv420p_nv12_uv$__internal_214_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_nv12_uv.nv.constant0.Subsample_Bilinear_yuv420p_nv12_uv.text.Subsample_Bilinear_yuv420p_nv12.nv.info.Subsample_Bilinear_yuv420p_nv12.nv.shared.Subsample_Bilinear_yuv420p_nv12.nv.constant2.Subsample_Bilinear_yuv420p_nv12$__internal_215_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_nv12.nv.constant0.Subsample_Bilinear_yuv420p_nv12.text.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.info.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.shared.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv420p_uv$__internal_216_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p_uv.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p_uv.text.Subsample_Bilinear_yuv444p16le_yuv420p.nv.info.Subsample_Bilinear_yuv444p16le_yuv420p.nv.shared.Subsample_Bilinear_yuv444p16le_yuv420p.nv.constant2.Subsample_Bilinear_yuv444p16le_yuv420p$__internal_217_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p.nv.constant0.Subsample_Bilinear_yuv444p16le_yuv420p.text.Subsample_Bilinear_p016le_yuv420p_uv.nv.info.Subsample_Bilinear_p016le_yuv420p_uv.nv.shared.Subsample_Bilinear_p016le_yuv420p_uv.nv.constant2.Subsample_Bilinear_p016le_yuv420p_uv$__internal_218_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_yuv420p_uv.nv.constant0.Subsample_Bilinear_p016le_yuv420p_uv.text.Subsample_Bilinear_p016le_yuv420p.nv.info.Subsample_Bilinear_p016le_yuv420p.nv.shared.Subsample_Bilinear_p016le_yuv420p.nv.constant2.Subsample_Bilinear_p016le_yuv420p$__internal_219_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p016le_yuv420p.nv.constant0.Subsample_Bilinear_p016le_yuv420p.text.Subsample_Bilinear_p010le_yuv420p_uv.nv.info.Subsample_Bilinear_p010le_yuv420p_uv.nv.shared.Subsample_Bilinear_p010le_yuv420p_uv.nv.constant2.Subsample_Bilinear_p010le_yuv420p_uv$__internal_220_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_yuv420p_uv.nv.constant0.Subsample_Bilinear_p010le_yuv420p_uv.text.Subsample_Bilinear_p010le_yuv420p.nv.info.Subsample_Bilinear_p010le_yuv420p.nv.shared.Subsample_Bilinear_p010le_yuv420p.nv.constant2.Subsample_Bilinear_p010le_yuv420p$__inter
nal_221_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_p010le_yuv420p.nv.constant0.Subsample_Bilinear_p010le_yuv420p.text.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.info.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.shared.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.constant2.Subsample_Bilinear_yuv444p_yuv420p_uv$__internal_222_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p_uv.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p_uv.text.Subsample_Bilinear_yuv444p_yuv420p.nv.info.Subsample_Bilinear_yuv444p_yuv420p.nv.shared.Subsample_Bilinear_yuv444p_yuv420p.nv.constant2.Subsample_Bilinear_yuv444p_yuv420p$__internal_223_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p.nv.constant0.Subsample_Bilinear_yuv444p_yuv420p.text.Subsample_Bilinear_nv12_yuv420p_uv.nv.info.Subsample_Bilinear_nv12_yuv420p_uv.nv.shared.Subsample_Bilinear_nv12_yuv420p_uv.nv.constant2.Subsample_Bilinear_nv12_yuv420p_uv$__internal_224_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_yuv420p_uv.nv.constant0.Subsample_Bilinear_nv12_yuv420p_uv.text.Subsample_Bilinear_nv12_yuv420p.nv.info.Subsample_Bilinear_nv12_yuv420p.nv.shared.Subsample_Bilinear_nv12_yuv420p.nv.constant2.Subsample_Bilinear_nv12_yuv420p$__internal_225_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_nv12_yuv420p.nv.constant0.Subsample_Bilinear_nv12_yuv420p.text.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.info.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.shared.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.constant2.Subsample_Bilinear_yuv420p_yuv420p_uv$__internal_226_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p_uv.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p_uv.text.Subsample_Bilinear_yuv420p_yuv420p.nv.info.Subsample_Bilinear_yuv420p_yuv420p.nv.shared.Subsample_Bilinear_yuv420p_yuv420p.nv.constant2.Subsample_Bilinear_yuv420p_yuv420p$__internal_227_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p.nv.constant0.Subsample_Bilinear_yuv420p_yuv420p.text.Subsample_Nearest_rgb0_bgr0_uv.nv.info.Subsample_Nearest_rgb0_bgr0_uv.nv.shared.Subsample_Nearest_rgb0_bgr0_uv.rel.nv.constant0.Subsample_Nearest_rgb0_bgr0_uv.nv.constant0.Subsample_Nearest_rgb0_bgr0_uv.text.Subsample_Nearest_rgb0_bgr0.nv.info.Subsample_Nearest_rgb0_bgr0.nv.shared.Subsample_Nearest_rgb0_bgr0.nv.constant2.Subsample_Nearest_rgb0_bgr0$__internal_228_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_rgb0_bgr0.nv.constant0.Subsample_Nearest_rgb0_bgr0.text.Subsample_Nearest_bgr0_rgb0_uv.nv.info.Subsample_Nearest_bgr0_rgb0_uv.nv.shared.Subsample_Nearest_bgr0_rgb0_uv.rel.nv.constant0.Subsample_Nearest_bgr0_rgb0_uv.nv.constant0.Subsample_Nearest_bgr0_rgb0_uv.text.Subsample_Nearest_bgr0_rgb0.nv.info.Subsample_Nearest_bgr0_rgb0.nv.shared.Subsample_Nearest_bgr0_rgb0.nv.constant2.Subsample_Nearest_bgr0_rgb0$__internal_229_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_bgr0_rgb0.nv.constant0.Subsample_Nearest_bgr0_rgb0.text.Subsample_Nearest_rgb0_rgb0_uv.nv.info.Subsample_Nearest_rgb0_rgb0_uv.nv.shared.Subsample_Nearest_rgb0_rgb0_uv.rel.nv.constant0.Subsample_Nearest_rgb0_rgb0_uv.nv.constant0.Subsample_Nearest_rgb0_rgb0_uv.text.Subsample_Nearest_rgb0_rgb0.nv.info.Subsample_Nearest_rgb0_rgb0.nv.shared.Subsample_Nearest_rgb0_rgb0.nv.constant2.Subsample_Nearest_rgb0_rgb0$__internal_230_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.con
stant0.Subsample_Nearest_rgb0_rgb0.nv.constant0.Subsample_Nearest_rgb0_rgb0.text.Subsample_Nearest_bgr0_bgr0_uv.nv.info.Subsample_Nearest_bgr0_bgr0_uv.nv.shared.Subsample_Nearest_bgr0_bgr0_uv.rel.nv.constant0.Subsample_Nearest_bgr0_bgr0_uv.nv.constant0.Subsample_Nearest_bgr0_bgr0_uv.text.Subsample_Nearest_bgr0_bgr0.nv.info.Subsample_Nearest_bgr0_bgr0.nv.shared.Subsample_Nearest_bgr0_bgr0.nv.constant2.Subsample_Nearest_bgr0_bgr0$__internal_231_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_bgr0_bgr0.nv.constant0.Subsample_Nearest_bgr0_bgr0.text.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.info.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p16le_uv$__internal_232_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le_uv.text.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.info.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p16le$__internal_233_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p16le.text.Subsample_Nearest_p016le_yuv444p16le_uv.nv.info.Subsample_Nearest_p016le_yuv444p16le_uv.nv.shared.Subsample_Nearest_p016le_yuv444p16le_uv.nv.constant2.Subsample_Nearest_p016le_yuv444p16le_uv$__internal_234_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_yuv444p16le_uv.nv.constant0.Subsample_Nearest_p016le_yuv444p16le_uv.text.Subsample_Nearest_p016le_yuv444p16le.nv.info.Subsample_Nearest_p016le_yuv444p16le.nv.shared.Subsample_Nearest_p016le_yuv444p16le.nv.constant2.Subsample_Nearest_p016le_yuv444p16le$__internal_235_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_yuv444p16le.nv.constant0.Subsample_Nearest_p016le_yuv444p16le.text.Subsample_Nearest_p010le_yuv444p16le_uv.nv.info.Subsample_Nearest_p010le_yuv444p16le_uv.nv.shared.Subsample_Nearest_p010le_yuv444p16le_uv.nv.constant2.Subsample_Nearest_p010le_yuv444p16le_uv$__internal_236_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_yuv444p16le_uv.nv.constant0.Subsample_Nearest_p010le_yuv444p16le_uv.text.Subsample_Nearest_p010le_yuv444p16le.nv.info.Subsample_Nearest_p010le_yuv444p16le.nv.shared.Subsample_Nearest_p010le_yuv444p16le.nv.constant2.Subsample_Nearest_p010le_yuv444p16le$__internal_237_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_yuv444p16le.nv.constant0.Subsample_Nearest_p010le_yuv444p16le.text.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.info.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.shared.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.constant2.Subsample_Nearest_yuv444p_yuv444p16le_uv$__internal_238_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le_uv.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le_uv.text.Subsample_Nearest_yuv444p_yuv444p16le.nv.info.Subsample_Nearest_yuv444p_yuv444p16le.nv.shared.Subsample_Nearest_yuv444p_yuv444p16le.nv.constant2.Subsample_Nearest_yuv444p_yuv444p16le$__internal_239_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le.nv.constant0.Subsample_Nearest_yuv444p_yuv444p16le.text.Subsample_Nearest_nv12_yuv444p16le_uv.nv.info.Subsample_Nearest_nv12_yuv444p16le_uv.nv.shared.Subs
ample_Nearest_nv12_yuv444p16le_uv.nv.constant2.Subsample_Nearest_nv12_yuv444p16le_uv$__internal_240_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_yuv444p16le_uv.nv.constant0.Subsample_Nearest_nv12_yuv444p16le_uv.text.Subsample_Nearest_nv12_yuv444p16le.nv.info.Subsample_Nearest_nv12_yuv444p16le.nv.shared.Subsample_Nearest_nv12_yuv444p16le.nv.constant2.Subsample_Nearest_nv12_yuv444p16le$__internal_241_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_yuv444p16le.nv.constant0.Subsample_Nearest_nv12_yuv444p16le.text.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.info.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.shared.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.constant2.Subsample_Nearest_yuv420p_yuv444p16le_uv$__internal_242_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le_uv.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le_uv.text.Subsample_Nearest_yuv420p_yuv444p16le.nv.info.Subsample_Nearest_yuv420p_yuv444p16le.nv.shared.Subsample_Nearest_yuv420p_yuv444p16le.nv.constant2.Subsample_Nearest_yuv420p_yuv444p16le$__internal_243_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le.nv.constant0.Subsample_Nearest_yuv420p_yuv444p16le.text.Subsample_Nearest_yuv444p16le_p016le_uv.nv.info.Subsample_Nearest_yuv444p16le_p016le_uv.nv.shared.Subsample_Nearest_yuv444p16le_p016le_uv.nv.constant2.Subsample_Nearest_yuv444p16le_p016le_uv$__internal_244_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p016le_uv.nv.constant0.Subsample_Nearest_yuv444p16le_p016le_uv.text.Subsample_Nearest_yuv444p16le_p016le.nv.info.Subsample_Nearest_yuv444p16le_p016le.nv.shared.Subsample_Nearest_yuv444p16le_p016le.nv.constant2.Subsample_Nearest_yuv444p16le_p016le$__internal_245_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p016le.nv.constant0.Subsample_Nearest_yuv444p16le_p016le.text.Subsample_Nearest_p016le_p016le_uv.nv.info.Subsample_Nearest_p016le_p016le_uv.nv.shared.Subsample_Nearest_p016le_p016le_uv.nv.constant2.Subsample_Nearest_p016le_p016le_uv$__internal_246_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_p016le_uv.nv.constant0.Subsample_Nearest_p016le_p016le_uv.text.Subsample_Nearest_p016le_p016le.nv.info.Subsample_Nearest_p016le_p016le.nv.shared.Subsample_Nearest_p016le_p016le.nv.constant2.Subsample_Nearest_p016le_p016le$__internal_247_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_p016le.nv.constant0.Subsample_Nearest_p016le_p016le.text.Subsample_Nearest_p010le_p016le_uv.nv.info.Subsample_Nearest_p010le_p016le_uv.nv.shared.Subsample_Nearest_p010le_p016le_uv.nv.constant2.Subsample_Nearest_p010le_p016le_uv$__internal_248_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_p016le_uv.nv.constant0.Subsample_Nearest_p010le_p016le_uv.text.Subsample_Nearest_p010le_p016le.nv.info.Subsample_Nearest_p010le_p016le.nv.shared.Subsample_Nearest_p010le_p016le.nv.constant2.Subsample_Nearest_p010le_p016le$__internal_249_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_p016le.nv.constant0.Subsample_Nearest_p010le_p016le.text.Subsample_Nearest_yuv444p_p016le_uv.nv.info.Subsample_Nearest_yuv444p_p016le_uv.nv.shared.Subsample_Nearest_yuv444p_p016le_uv.nv.constant2.Subsample_Nearest_yuv444p_p016le_uv$__internal_250_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_p01
6le_uv.nv.constant0.Subsample_Nearest_yuv444p_p016le_uv.text.Subsample_Nearest_yuv444p_p016le.nv.info.Subsample_Nearest_yuv444p_p016le.nv.shared.Subsample_Nearest_yuv444p_p016le.nv.constant2.Subsample_Nearest_yuv444p_p016le$__internal_251_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_p016le.nv.constant0.Subsample_Nearest_yuv444p_p016le.text.Subsample_Nearest_nv12_p016le_uv.nv.info.Subsample_Nearest_nv12_p016le_uv.nv.shared.Subsample_Nearest_nv12_p016le_uv.nv.constant2.Subsample_Nearest_nv12_p016le_uv$__internal_252_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_p016le_uv.nv.constant0.Subsample_Nearest_nv12_p016le_uv.text.Subsample_Nearest_nv12_p016le.nv.info.Subsample_Nearest_nv12_p016le.nv.shared.Subsample_Nearest_nv12_p016le.nv.constant2.Subsample_Nearest_nv12_p016le$__internal_253_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_p016le.nv.constant0.Subsample_Nearest_nv12_p016le.text.Subsample_Nearest_yuv420p_p016le_uv.nv.info.Subsample_Nearest_yuv420p_p016le_uv.nv.shared.Subsample_Nearest_yuv420p_p016le_uv.nv.constant2.Subsample_Nearest_yuv420p_p016le_uv$__internal_254_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_p016le_uv.nv.constant0.Subsample_Nearest_yuv420p_p016le_uv.text.Subsample_Nearest_yuv420p_p016le.nv.info.Subsample_Nearest_yuv420p_p016le.nv.shared.Subsample_Nearest_yuv420p_p016le.nv.constant2.Subsample_Nearest_yuv420p_p016le$__internal_255_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_p016le.nv.constant0.Subsample_Nearest_yuv420p_p016le.text.Subsample_Nearest_yuv444p16le_p010le_uv.nv.info.Subsample_Nearest_yuv444p16le_p010le_uv.nv.shared.Subsample_Nearest_yuv444p16le_p010le_uv.nv.constant2.Subsample_Nearest_yuv444p16le_p010le_uv$__internal_256_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p010le_uv.nv.constant0.Subsample_Nearest_yuv444p16le_p010le_uv.text.Subsample_Nearest_yuv444p16le_p010le.nv.info.Subsample_Nearest_yuv444p16le_p010le.nv.shared.Subsample_Nearest_yuv444p16le_p010le.nv.constant2.Subsample_Nearest_yuv444p16le_p010le$__internal_257_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_p010le.nv.constant0.Subsample_Nearest_yuv444p16le_p010le.text.Subsample_Nearest_p016le_p010le_uv.nv.info.Subsample_Nearest_p016le_p010le_uv.nv.shared.Subsample_Nearest_p016le_p010le_uv.nv.constant2.Subsample_Nearest_p016le_p010le_uv$__internal_258_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_p010le_uv.nv.constant0.Subsample_Nearest_p016le_p010le_uv.text.Subsample_Nearest_p016le_p010le.nv.info.Subsample_Nearest_p016le_p010le.nv.shared.Subsample_Nearest_p016le_p010le.nv.constant2.Subsample_Nearest_p016le_p010le$__internal_259_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_p010le.nv.constant0.Subsample_Nearest_p016le_p010le.text.Subsample_Nearest_p010le_p010le_uv.nv.info.Subsample_Nearest_p010le_p010le_uv.nv.shared.Subsample_Nearest_p010le_p010le_uv.nv.constant2.Subsample_Nearest_p010le_p010le_uv$__internal_260_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_p010le_uv.nv.constant0.Subsample_Nearest_p010le_p010le_uv.text.Subsample_Nearest_p010le_p010le.nv.info.Subsample_Nearest_p010le_p010le.nv.shared.Subsample_Nearest_p010le_p010le.nv.constant2.Subsample_Nearest_p010le_p010le$__internal_261_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.
Subsample_Nearest_p010le_p010le.nv.constant0.Subsample_Nearest_p010le_p010le.text.Subsample_Nearest_yuv444p_p010le_uv.nv.info.Subsample_Nearest_yuv444p_p010le_uv.nv.shared.Subsample_Nearest_yuv444p_p010le_uv.nv.constant2.Subsample_Nearest_yuv444p_p010le_uv$__internal_262_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_p010le_uv.nv.constant0.Subsample_Nearest_yuv444p_p010le_uv.text.Subsample_Nearest_yuv444p_p010le.nv.info.Subsample_Nearest_yuv444p_p010le.nv.shared.Subsample_Nearest_yuv444p_p010le.nv.constant2.Subsample_Nearest_yuv444p_p010le$__internal_263_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_p010le.nv.constant0.Subsample_Nearest_yuv444p_p010le.text.Subsample_Nearest_nv12_p010le_uv.nv.info.Subsample_Nearest_nv12_p010le_uv.nv.shared.Subsample_Nearest_nv12_p010le_uv.nv.constant2.Subsample_Nearest_nv12_p010le_uv$__internal_264_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_p010le_uv.nv.constant0.Subsample_Nearest_nv12_p010le_uv.text.Subsample_Nearest_nv12_p010le.nv.info.Subsample_Nearest_nv12_p010le.nv.shared.Subsample_Nearest_nv12_p010le.nv.constant2.Subsample_Nearest_nv12_p010le$__internal_265_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_p010le.nv.constant0.Subsample_Nearest_nv12_p010le.text.Subsample_Nearest_yuv420p_p010le_uv.nv.info.Subsample_Nearest_yuv420p_p010le_uv.nv.shared.Subsample_Nearest_yuv420p_p010le_uv.nv.constant2.Subsample_Nearest_yuv420p_p010le_uv$__internal_266_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_p010le_uv.nv.constant0.Subsample_Nearest_yuv420p_p010le_uv.text.Subsample_Nearest_yuv420p_p010le.nv.info.Subsample_Nearest_yuv420p_p010le.nv.shared.Subsample_Nearest_yuv420p_p010le.nv.constant2.Subsample_Nearest_yuv420p_p010le$__internal_267_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_p010le.nv.constant0.Subsample_Nearest_yuv420p_p010le.text.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.info.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p_uv$__internal_268_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p_uv.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p_uv.text.Subsample_Nearest_yuv444p16le_yuv444p.nv.info.Subsample_Nearest_yuv444p16le_yuv444p.nv.shared.Subsample_Nearest_yuv444p16le_yuv444p.nv.constant2.Subsample_Nearest_yuv444p16le_yuv444p$__internal_269_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p.nv.constant0.Subsample_Nearest_yuv444p16le_yuv444p.text.Subsample_Nearest_p016le_yuv444p_uv.nv.info.Subsample_Nearest_p016le_yuv444p_uv.nv.shared.Subsample_Nearest_p016le_yuv444p_uv.nv.constant2.Subsample_Nearest_p016le_yuv444p_uv$__internal_270_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_yuv444p_uv.nv.constant0.Subsample_Nearest_p016le_yuv444p_uv.text.Subsample_Nearest_p016le_yuv444p.nv.info.Subsample_Nearest_p016le_yuv444p.nv.shared.Subsample_Nearest_p016le_yuv444p.nv.constant2.Subsample_Nearest_p016le_yuv444p$__internal_271_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_yuv444p.nv.constant0.Subsample_Nearest_p016le_yuv444p.text.Subsample_Nearest_p010le_yuv444p_uv.nv.info.Subsample_Nearest_p010le_yuv444p_uv.nv.shared.Subsample_Nearest_p010le_yuv444p_uv.nv.constant2.Subsample_Nearest_p010le_yuv444p_uv$__in
ternal_272_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_yuv444p_uv.nv.constant0.Subsample_Nearest_p010le_yuv444p_uv.text.Subsample_Nearest_p010le_yuv444p.nv.info.Subsample_Nearest_p010le_yuv444p.nv.shared.Subsample_Nearest_p010le_yuv444p.nv.constant2.Subsample_Nearest_p010le_yuv444p$__internal_273_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_yuv444p.nv.constant0.Subsample_Nearest_p010le_yuv444p.text.Subsample_Nearest_yuv444p_yuv444p_uv.nv.info.Subsample_Nearest_yuv444p_yuv444p_uv.nv.shared.Subsample_Nearest_yuv444p_yuv444p_uv.nv.constant2.Subsample_Nearest_yuv444p_yuv444p_uv$__internal_274_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv444p_uv.nv.constant0.Subsample_Nearest_yuv444p_yuv444p_uv.text.Subsample_Nearest_yuv444p_yuv444p.nv.info.Subsample_Nearest_yuv444p_yuv444p.nv.shared.Subsample_Nearest_yuv444p_yuv444p.nv.constant2.Subsample_Nearest_yuv444p_yuv444p$__internal_275_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv444p.nv.constant0.Subsample_Nearest_yuv444p_yuv444p.text.Subsample_Nearest_nv12_yuv444p_uv.nv.info.Subsample_Nearest_nv12_yuv444p_uv.nv.shared.Subsample_Nearest_nv12_yuv444p_uv.nv.constant2.Subsample_Nearest_nv12_yuv444p_uv$__internal_276_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_yuv444p_uv.nv.constant0.Subsample_Nearest_nv12_yuv444p_uv.text.Subsample_Nearest_nv12_yuv444p.nv.info.Subsample_Nearest_nv12_yuv444p.nv.shared.Subsample_Nearest_nv12_yuv444p.nv.constant2.Subsample_Nearest_nv12_yuv444p$__internal_277_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_yuv444p.nv.constant0.Subsample_Nearest_nv12_yuv444p.text.Subsample_Nearest_yuv420p_yuv444p_uv.nv.info.Subsample_Nearest_yuv420p_yuv444p_uv.nv.shared.Subsample_Nearest_yuv420p_yuv444p_uv.nv.constant2.Subsample_Nearest_yuv420p_yuv444p_uv$__internal_278_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv444p_uv.nv.constant0.Subsample_Nearest_yuv420p_yuv444p_uv.text.Subsample_Nearest_yuv420p_yuv444p.nv.info.Subsample_Nearest_yuv420p_yuv444p.nv.shared.Subsample_Nearest_yuv420p_yuv444p.nv.constant2.Subsample_Nearest_yuv420p_yuv444p$__internal_279_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv444p.nv.constant0.Subsample_Nearest_yuv420p_yuv444p.text.Subsample_Nearest_yuv444p16le_nv12_uv.nv.info.Subsample_Nearest_yuv444p16le_nv12_uv.nv.shared.Subsample_Nearest_yuv444p16le_nv12_uv.nv.constant2.Subsample_Nearest_yuv444p16le_nv12_uv$__internal_280_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_nv12_uv.nv.constant0.Subsample_Nearest_yuv444p16le_nv12_uv.text.Subsample_Nearest_yuv444p16le_nv12.nv.info.Subsample_Nearest_yuv444p16le_nv12.nv.shared.Subsample_Nearest_yuv444p16le_nv12.nv.constant2.Subsample_Nearest_yuv444p16le_nv12$__internal_281_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_nv12.nv.constant0.Subsample_Nearest_yuv444p16le_nv12.text.Subsample_Nearest_p016le_nv12_uv.nv.info.Subsample_Nearest_p016le_nv12_uv.nv.shared.Subsample_Nearest_p016le_nv12_uv.nv.constant2.Subsample_Nearest_p016le_nv12_uv$__internal_282_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_nv12_uv.nv.constant0.Subsample_Nearest_p016le_nv12_uv.text.Subsample_Nearest_p016le_nv12.nv.info.Subsample_Nearest_p016le_nv12.nv.shared.Subsample_Nearest_p016le_nv12.nv.constant2.S
ubsample_Nearest_p016le_nv12$__internal_283_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_nv12.nv.constant0.Subsample_Nearest_p016le_nv12.text.Subsample_Nearest_p010le_nv12_uv.nv.info.Subsample_Nearest_p010le_nv12_uv.nv.shared.Subsample_Nearest_p010le_nv12_uv.nv.constant2.Subsample_Nearest_p010le_nv12_uv$__internal_284_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_nv12_uv.nv.constant0.Subsample_Nearest_p010le_nv12_uv.text.Subsample_Nearest_p010le_nv12.nv.info.Subsample_Nearest_p010le_nv12.nv.shared.Subsample_Nearest_p010le_nv12.nv.constant2.Subsample_Nearest_p010le_nv12$__internal_285_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_nv12.nv.constant0.Subsample_Nearest_p010le_nv12.text.Subsample_Nearest_yuv444p_nv12_uv.nv.info.Subsample_Nearest_yuv444p_nv12_uv.nv.shared.Subsample_Nearest_yuv444p_nv12_uv.nv.constant2.Subsample_Nearest_yuv444p_nv12_uv$__internal_286_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_nv12_uv.nv.constant0.Subsample_Nearest_yuv444p_nv12_uv.text.Subsample_Nearest_yuv444p_nv12.nv.info.Subsample_Nearest_yuv444p_nv12.nv.shared.Subsample_Nearest_yuv444p_nv12.nv.constant2.Subsample_Nearest_yuv444p_nv12$__internal_287_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_nv12.nv.constant0.Subsample_Nearest_yuv444p_nv12.text.Subsample_Nearest_nv12_nv12_uv.nv.info.Subsample_Nearest_nv12_nv12_uv.nv.shared.Subsample_Nearest_nv12_nv12_uv.nv.constant2.Subsample_Nearest_nv12_nv12_uv$__internal_288_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_nv12_uv.nv.constant0.Subsample_Nearest_nv12_nv12_uv.text.Subsample_Nearest_nv12_nv12.nv.info.Subsample_Nearest_nv12_nv12.nv.shared.Subsample_Nearest_nv12_nv12.nv.constant2.Subsample_Nearest_nv12_nv12$__internal_289_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_nv12.nv.constant0.Subsample_Nearest_nv12_nv12.text.Subsample_Nearest_yuv420p_nv12_uv.nv.info.Subsample_Nearest_yuv420p_nv12_uv.nv.shared.Subsample_Nearest_yuv420p_nv12_uv.nv.constant2.Subsample_Nearest_yuv420p_nv12_uv$__internal_290_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_nv12_uv.nv.constant0.Subsample_Nearest_yuv420p_nv12_uv.text.Subsample_Nearest_yuv420p_nv12.nv.info.Subsample_Nearest_yuv420p_nv12.nv.shared.Subsample_Nearest_yuv420p_nv12.nv.constant2.Subsample_Nearest_yuv420p_nv12$__internal_291_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_nv12.nv.constant0.Subsample_Nearest_yuv420p_nv12.text.Subsample_Nearest_yuv444p16le_yuv420p_uv.nv.info.Subsample_Nearest_yuv444p16le_yuv420p_uv.nv.shared.Subsample_Nearest_yuv444p16le_yuv420p_uv.nv.constant2.Subsample_Nearest_yuv444p16le_yuv420p_uv$__internal_292_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv420p_uv.nv.constant0.Subsample_Nearest_yuv444p16le_yuv420p_uv.text.Subsample_Nearest_yuv444p16le_yuv420p.nv.info.Subsample_Nearest_yuv444p16le_yuv420p.nv.shared.Subsample_Nearest_yuv444p16le_yuv420p.nv.constant2.Subsample_Nearest_yuv444p16le_yuv420p$__internal_293_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p16le_yuv420p.nv.constant0.Subsample_Nearest_yuv444p16le_yuv420p.text.Subsample_Nearest_p016le_yuv420p_uv.nv.info.Subsample_Nearest_p016le_yuv420p_uv.nv.shared.Subsample_Nearest_p016le_yuv420p_uv.nv.constant2.Subsample_Nearest_p016le_yuv420p_uv$__internal_294_$
__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_yuv420p_uv.nv.constant0.Subsample_Nearest_p016le_yuv420p_uv.text.Subsample_Nearest_p016le_yuv420p.nv.info.Subsample_Nearest_p016le_yuv420p.nv.shared.Subsample_Nearest_p016le_yuv420p.nv.constant2.Subsample_Nearest_p016le_yuv420p$__internal_295_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p016le_yuv420p.nv.constant0.Subsample_Nearest_p016le_yuv420p.text.Subsample_Nearest_p010le_yuv420p_uv.nv.info.Subsample_Nearest_p010le_yuv420p_uv.nv.shared.Subsample_Nearest_p010le_yuv420p_uv.nv.constant2.Subsample_Nearest_p010le_yuv420p_uv$__internal_296_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_yuv420p_uv.nv.constant0.Subsample_Nearest_p010le_yuv420p_uv.text.Subsample_Nearest_p010le_yuv420p.nv.info.Subsample_Nearest_p010le_yuv420p.nv.shared.Subsample_Nearest_p010le_yuv420p.nv.constant2.Subsample_Nearest_p010le_yuv420p$__internal_297_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_p010le_yuv420p.nv.constant0.Subsample_Nearest_p010le_yuv420p.text.Subsample_Nearest_yuv444p_yuv420p_uv.nv.info.Subsample_Nearest_yuv444p_yuv420p_uv.nv.shared.Subsample_Nearest_yuv444p_yuv420p_uv.nv.constant2.Subsample_Nearest_yuv444p_yuv420p_uv$__internal_298_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv420p_uv.nv.constant0.Subsample_Nearest_yuv444p_yuv420p_uv.text.Subsample_Nearest_yuv444p_yuv420p.nv.info.Subsample_Nearest_yuv444p_yuv420p.nv.shared.Subsample_Nearest_yuv444p_yuv420p.nv.constant2.Subsample_Nearest_yuv444p_yuv420p$__internal_299_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv444p_yuv420p.nv.constant0.Subsample_Nearest_yuv444p_yuv420p.text.Subsample_Nearest_nv12_yuv420p_uv.nv.info.Subsample_Nearest_nv12_yuv420p_uv.nv.shared.Subsample_Nearest_nv12_yuv420p_uv.nv.constant2.Subsample_Nearest_nv12_yuv420p_uv$__internal_300_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_yuv420p_uv.nv.constant0.Subsample_Nearest_nv12_yuv420p_uv.text.Subsample_Nearest_nv12_yuv420p.nv.info.Subsample_Nearest_nv12_yuv420p.nv.shared.Subsample_Nearest_nv12_yuv420p.nv.constant2.Subsample_Nearest_nv12_yuv420p$__internal_301_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_nv12_yuv420p.nv.constant0.Subsample_Nearest_nv12_yuv420p.text.Subsample_Nearest_yuv420p_yuv420p_uv.nv.info.Subsample_Nearest_yuv420p_yuv420p_uv.nv.shared.Subsample_Nearest_yuv420p_yuv420p_uv.nv.constant2.Subsample_Nearest_yuv420p_yuv420p_uv$__internal_302_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv420p_uv.nv.constant0.Subsample_Nearest_yuv420p_yuv420p_uv.text.Subsample_Nearest_yuv420p_yuv420p.nv.info.Subsample_Nearest_yuv420p_yuv420p.nv.shared.Subsample_Nearest_yuv420p_yuv420p.nv.constant2.Subsample_Nearest_yuv420p_yuv420p$__internal_303_$__cuda_sm3x_div_rn_noftz_f32_slowpath.rel.nv.constant0.Subsample_Nearest_yuv420p_yuv420p.nv.constant0.Subsample_Nearest_yuv420p_yuv420p.nv.callgraph.nv.prototype.nv.rel.actionSubsample_Lanczos_rgb0_bgr0_uvSubsample_Lanczos_rgb0_bgr0Subsample_Lanczos_bgr0_rgb0_uvSubsample_Lanczos_bgr0_rgb0Subsample_Lanczos_rgb0_rgb0_uvSubsample_Lanczos_rgb0_rgb0Subsample_Lanczos_bgr0_bgr0_uvSubsample_Lanczos_bgr0_bgr0Subsample_Lanczos_yuv444p16le_yuv444p16le_uvSubsample_Lanczos_yuv444p16le_yuv444p16leSubsample_Lanczos_p016le_yuv444p16le_uvSubsample_Lanczos_p016le_yuv444p16leSubsample_Lanczos_p010le_yuv444p16le_uvSubsample_Lancz
[Kernel symbol table of this cache entry. The entry points follow the pattern Subsample_<Algorithm>_<in_fmt>_<out_fmt>, each with a companion _uv variant for the chroma planes. <Algorithm> is one of Nearest, Bilinear, Bicubic, or Lanczos; the listed names cover every source/destination pairing of yuv420p, nv12, yuv444p, p010le, p016le, and yuv444p16le, plus rgb0/bgr0 conversions (rgb0_bgr0, bgr0_rgb0, rgb0_rgb0, bgr0_bgr0) for the Nearest, Bilinear, and Bicubic variants visible here.]
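The table above lists the kernel entry points by name only. As a minimal sketch (assuming a module file named scaler.cubin and an arbitrary algorithm/format pair, neither of which comes from this cache entry), the following C program shows how an entry point following the Subsample_<Algorithm>_<in_fmt>_<out_fmt> pattern could be resolved by name through the CUDA driver API.

/*
 * Hedged illustration only: composes a Subsample_<Algorithm>_<in>_<out> name
 * and resolves it with the CUDA driver API. The module path "scaler.cubin"
 * and the chosen algorithm/format pair are assumptions for the example; only
 * the naming pattern itself comes from the symbol table above.
 */
#include <stdio.h>
#include <cuda.h>

static void check(CUresult r, const char *what)
{
    if (r != CUDA_SUCCESS) {
        const char *msg = NULL;
        cuGetErrorString(r, &msg);
        fprintf(stderr, "%s failed: %s\n", what, msg ? msg : "unknown error");
    }
}

int main(void)
{
    CUdevice   dev;
    CUcontext  ctx;
    CUmodule   mod;
    CUfunction fn;
    char       name[128];

    check(cuInit(0), "cuInit");
    check(cuDeviceGet(&dev, 0), "cuDeviceGet");
    check(cuCtxCreate(&ctx, 0, dev), "cuCtxCreate");

    /* Placeholder module path; not taken from this cache entry. */
    check(cuModuleLoad(&mod, "scaler.cubin"), "cuModuleLoad");

    /* Compose a name following the Subsample_<Algorithm>_<in>_<out> pattern. */
    snprintf(name, sizeof(name), "Subsample_%s_%s_%s", "Bicubic", "nv12", "yuv420p");
    check(cuModuleGetFunction(&fn, mod, name), "cuModuleGetFunction");

    printf("resolved kernel %s\n", name);

    cuModuleUnload(mod);
    cuCtxDestroy(ctx);
    return 0;
}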
1G��Y0���Y
W�Y����?@�@��`�
���\� ����*�\"��1���?*'�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������	
�?X9������[�`��?h8Gh\'��P��H��G�\G�P�@��w�\�P�P� ��_�Q���1�h\������Y1��\1���Y������Y"��Y�����?0���\@��`�"���\�����������[� �����Q

@X9� �@�		���
?h8gh\���!��*��P���@��g�\G�P��\����?�P�P�Q������1�h\��Y����1��\1���Y���Y����!��Y��0���\���@�`�!���\���@���	���[
���\� ��

�����	?h8� ��	�h\ ��+��P��H����\G�P�@����\�P�P� ��_	�Q���1h\����7	�Y1��\17��Y�����Y �Y�����?0���\@��`� ���\��������
���[� ����� X\
?h8���
�h\(�����@����\G�P�\����?�P�P
�Q������17h\W
�Y����1�\1W��Y'��Y����(7	�Y	��0��\���@�`�(���\���@����?X8���"������?X9)��� ��������[?h8����'h\	��'�\��G�PG�\�P=���P
�Q���� ��1�h\�
�Y1W�\����1���Y���Y)��Y����
��0W��\@�`�����)���\�����"�����[���@X9� �@����?h87h\����$��
��7�\��G�Pw�\�P=���P��Q���� ��1�h\���Y1��\����1���Y���Y$�
�Y������0���\@�`�����$���\�����"�����[������B��?h8gh\&����H��#g�\G�P�@��%��\#�P%�P� ��_�Q���1�h\������Y 1��\1���Y������Y&��Y�����?0���\@�u`�&���\��������'���[� �@���?h8�h\����%����,��\��#G�P-��\,�P=��-�P%��Q���� ��1�h\%#W��Y(1��\����1W��Y���Y%%��Y����
��0���\@e`�����%���\��������!'X\
��X\����#�X\#G�P((7�\����#�Q��Y-���Y���#��Y���Y
������1���\07��\@Y`��������\��( 7�\ ��?(#G�P���#�Q����.(��Y-.��Y#��Y����.���Y
��1��\����?07��\@�O`����\��D��(!7�\ #G�P���A���-#�Q. ��Y���� .��Y-#��Y .��Y� ��
��1��\07��\���@F`� ���\��� ��("7�\!#G�P�����(#��Q/!���Y!/'��Y���(#�Y!/���Y
������1'��\07��\@=`�����!���\��#$�X\����
��##gX\7#WX\ ��?"7G�P()w�\#7�Q���@�""7�Y("���Y#7���Y����""7�Y
��1���\����?0w��\@�1`�"���\��D��($w�\(7G�P���A���#7�Q.(7�Y����).G��Y#7��Y#.7��Y� ��
��1G��\0w��\���@(`�#���\��� ��(&w�\$7G�P�����)7�Q/$��Y$/g��Y���)7G�Y$/��Y
������1g��\0w��\@`�����$���\��(%w�\ ��?&7G�P���(7�Q����/&��Y,/W��Y&7���Y����,/g�Y
��1W��\����0w��\@�`������%�?X9&�?X93W�/ ر"�@�5��/ �4&W�/ �2&��/ ����(�?X8)@X8/��/ �� �@�/'��k[1��/ �0&��/ رB�@�.&��/ �'�?X8-'W�/ ��`��6@X874��33h\�����75h\//h\44��Y� ��� 1h\22��Y50��Y����!.�Y -��Y4'��/ ر"�@�7'��/ �.'��/ �-6��/ ر"�@�/6W�/ �06��/ �16��/ ��`��+��W4��34��Y� �@�4'��L24�)857���Y�`��44@�874��.���Y� ��!4'�[--���Y.4'� [�@��32(82G�[74'�([� ��!4'�[#-7h\-�)8� ���!7 �\77�[.7�[�`��3G�[4�� /��Y�"��.��0['�0[3-G�[� ��4-G�[" '��Y50���Y�!�@�1���Y'�\�)8� ��3-G�0[��"5G�Y����!3�\��Y\�@���w	���\@�6����'\ ��L!�L����?  ��
��G�\��G�P w�\�P=��  �P�Q���� ��1h\G�Y(1��\������Yw��YG��Y����
��0���\@�`��������\��������/*��k[�������
��g�\G�P�@�t
��\�P

�P� ����Q���1�h\����g�Y(1��\��Y���G��Yg�Y
�����?0���\@��`����\��������/+��k[�������
���@��	��\G�P��\����_		�P�P�Q������1	�h\w�Y����(1��\��Yg��Y����w�Y
��0���\���@�`����\���������
������
��\G�P�@�t�\

�P�P� �����Q���1
h\����
���Y1�\
��Y���w��Y
���Y�����?0��\@��`����\��������������
��	��'�\2��G�PG�\�P]���P	�Q����' ��1Gh\
��Y1W�\����
��Y	���Y
��Y����?��0W��\@�`��������\����� ��	���
��
���@��7�\
G�Pw�\����_�P	�P�Q������1�h\
��Y����1��\	��Y
���Y����	���Y��0���\���@�`�	���\������
���
������g�\
G�P�@�t��\�P�P� ���
�Q���1�h\����
��Y1��\
��Y������Y
��Y�����?0���\@�{`�
���\���������
��������\G�P�@�t��\�P�P� �����Q���1�h\����W��Y1��\��Y������YW��Y�����?0���\@�m`����\���������X\����
��
gX\

wX\4 ��_
G�P��\
��Q���@����Yw��Y
��Y������Y��1w��\����?0���\@�a`����\�������\
G�P���B���
��Q���Y����g��Y
w�Y���Y� ����1g��\0���\���@X`����\��� ����\
G�P�����
�Q��YG��Y���
g�Y��Y������?1G��\0���\@O`��������\����\1 ��_
G�P���
�Q������Y���Y
G�Y������Y��1���\����?0���\@�E`����\�����
	�X\
��@��
�X\WX\
G�P������\��Q

���Y���
���Y��Y

�Y� ����1���\0���\���@:`�
���\��� ��	��\G�P������Q��Y���Y������Y��Y������?1���\0���\@1`��������\��
��\1 ��_	G�P�����Q����	���Y	���Y��Y����	���Y��1���\����?0���\@�'`�	���\�������\
G�P���B�����Q
�Y����
W��Y���Y,�Y� ����1W��\0���\����@`���W�O ر"�@�&W�O �
'W�O �6W�O ��"�@���O �&��O �'��O ��"�@�6��O ���O �&��O ��"6@�'��O ���O �&��O ر"�@�'��O �6��O �6��O �������L�LW4��$��gh\G��Ygh\�@��74��gh\
w��Y�@���
gh\G�
�YG��Y� �����YG�Yw�
�Y����w��Y���Y�h\����w��Y��
�Y��Y����?���Y��Y��Y����?�w	���\ �����,0w�8�A������.,����-1w�8/.��h6����/-����//��"h62���\� ��
@�21���030���0�����272E\�8@�2'��L�����0'���[�4@�1����0�����1����6�0����62`��P����
1@�1'0FL7����P� ��.@�1����6�0'0EL����/����P�)@�//��m[� ��2���\11
���22�
���� ��/.��m[00
���222
����.,��"��4.\ ��/4G�P.4�0Y\3-�������-/�Q03�\//���Y����10���Y-.�Y-/���Y����33��8..��Y,/���Y����23'\0,w�800'\����10����/1��l6
@�����/0��i6�@�/0�m6����@�/0���m7,,� ��
@�2/���Y/0��k[����22���1022 ����3/���Y-/���Y12H\� ��73����[/1��k[-0�\� ��--�K[/`���P22�(\����-2(8.���8/.��<����//'G\/-�\,/�G\����@�,,,,�'���@�,2��\@����@�007�H,0�'���@�,07�H@�� ��,��,,W�P@�����,1X\@� �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�q`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�W`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@D`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�;`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@3`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �G� �G� ��"�@�
G� �G� ��� �� �@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G�YWh\�@��W4��Wh\7�Y� ��G�Y'��LWh\�$��G��Y
�Y

7��Y�`���)874��G�Y� ���)87��Y
�Y����@�8(8
�h\� ���w�['�[G�[�"��G�['�[
'��Y� ��4����Y7�Y�&���
w�[w� [w�([�"��G�0[W0['�[�"��'�[7��Y�Y�@��?� �\�0[���Y� �@�W�\w�\�)8�`���w	��\W\�B��?����K�� �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@�+`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@�#`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@�`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@�`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@�`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@��`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@��`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@�`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@�`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@�`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@|`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@�s`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@k`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@``����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@�W`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@O`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@F`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0��B�@�"��/0�G�/0���/0ز"�@�'G�/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\�`��"'��L�4��

Wh\� ��Wh\G�YG�Y����
"�)8g4��g��Y� ��	g��Y"@�8 �)8�$��	w�Y
w�Y

(8� ���� ['�[
'�[�$��		7�Y%7��Y ��[�`���[G4��Wh\� ����0[�[
 ��[�@��
Wh\G��Y"�([� ��� ��0[
 '�[ '�[�`��G�Y"���\
 ��0[�`��'��\'4��g��Y�$��Wh\Wh\g�
�Y�"��
!��\!!�)8w�Y�$��G��YG��Yw�Y� ��
�\!!�\��Y�%��g��Yg��Y%��Y�@����L	
��6w��Y�@��w�Y	�L7�Y����7��Y��L�w�@���w	��\	�L= ��_	g��\ �� �����
w�8A������
����
w�8��h6� ��
����� h6G��\�������\@����0�������0�'2@\�7@����'��L�'�<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[��m[� �����\�������2����	���2	
��@�P�"��\G�P����?�0Y\

����Q����
��\��Y��Y� ����Y'��Y

��8������Y
\
���Y����
w�8

\
����������l6�@�
��i6����@�
�m6@����
���m7

@��������Y
��k[���� ��
 ���Y�@�����Y�H\����[������k[
�\��K[����!��P�(\
(8������8��<�G\���
�\
�G\@���



�'@�����
��\@�@���
7H

�'@�����
7H@�
�����

W�P@�
GX\���@� ����@� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�q`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�W`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@D`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�;`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@3`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �G� �G� ��"�@�
G� �G� ��� �� �@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G�YWh\�@��W4��Wh\7�Y� ��G�Y'��LWh\�$��G��Y
�Y

7��Y�`���)874��G�Y� ���)87��Y
�Y����@�8(8
�h\� ���w�['�[G�[�"��G�['�[
'��Y� ��4����Y7�Y�&���
w�[w� [w�([�"��G�0[W0['�[�"��'�[7��Y�Y�@��?� �\�0[���Y� �@�W�\w�\�)8�`���w	��\W\�B��?����K�� �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@.`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@&`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@�`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@��`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@��`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@��`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@�`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@�`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@��`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@��`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@��`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@�~`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@v`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@�m`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@�b`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@Z`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@�Q`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@�H`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0ر"�@�G�/0�"��/0�'G�/0ر"�@���/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\�`��'��L�4��

Wh\� ��Wh\
G�YG��Y�����)8w4��
g�Y� ��	g��Y��8(8�$��	w�Y

w�Y�[�"���
 '�[ ��[ ��[���		7�Y%
7��YW4���$��Wh\
Wh\�([� �@� ��0[ '�[�[�$��G��YG�Y� [��� �)8  ��0['4��� ���
g��YWh\Wh\� ��
���\'�['�[� ���g��YG��YG��Y� ��
w�Y !�\�0[� ���w�Yg��Yg��Y� ��

��Y!!�)8 \� ���%��Yw��Yw�Y� ����\7�Y7��Y�`��	!�\��L�w�@���w	��\	 ��66 ��	g��\	�L��L�����(8�(8	�L���G\	gG\ �����	 ���
w�8� ���@��
����
w�8������h6
����� h6���G��\���\@��������0���0�'2@\����7@�'��L�'�<�[� ��3@�����0�����6���������6�0@�'0AL�����!��P�-@�'0AL�����!��P�)@���m[� ����m[���\�����������2	���2	����
��"��\ ��G�P�0Y\

��������Q
��\��Y������Y��Y'��Y� ��

��8��Y
\����
���Y
w�8

\����
������l6@�����
��i6�@�
�m6����@�
���m7

� ��@����Y
��k[�������
 �������Y���Y�H\� ������[��k[
�\� ����K[!��P�(\����
(8��8��<�����G\
�\
�G\����@�



�'���@�
��\@����@�
7H

�'���@�
7H@�� ��
��

W�P@�����
GX\@� �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@~`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�r`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@j`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@a`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�X`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@N`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@E`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�<`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@4`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �
G� �G� ر"�@�G� �G� ��� �� 6@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G��Y

Wh\���'��L7��YW4���@��G�Y�)8��Y� ���7��YWh\
(8���Wh\�Y'4��� ��G��Y@�8G�
�Y� ���
�h\��[
��[���7��Yw�[4��� ��7�Y'��Y
'�[�"���0[
'�[��Y�"����)8w�[w� [� ��w�([�0[7	�Y�!����	�Y
'�['�[� ��g��\w�\�)8����	�	�Y�0[		�w����g�\		���\7\�"��?����K��	�(8� ���G\ �������	w�8�@��
	����� ��w�8
��h6��������� h6w��\���\� ��@����0���0�����2@\�7@�'��L������<�[�3@�����0��������6�����6
1@������'0AL�!��P�-@������'0AL�!��P�)@�������m[
��m[���\� ���������2	���2���		�#�������\G�P
�0Y\� ������Q��\������Y���Y
���Y������Y���8

���Y����	���Y\	w�8����\������l6�����@���i6�@������m6@����m7����		@����Y� ����k[���� �� ���Y
���Y�����H\����[��k[�����\�K[!��P�����(\(8
��8����

��<

G\
�\����	
�G\@�		����		�'@�	��\����@�@�7H����	�'@�	7H�@�D@�	��		W�P���@�	wX\@��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<0*�L1*7�L0G�P���1�\0��QG��Y����17��Y0g��YG�Y����?@�@�`����\��D<0*�L1*G�L0G�P���1�\0��QG��Y����
1G��Y0���Y
W�Y����?@�@��`�
���\� ����*�\"��1���?*'�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������	
�?X9������[�`��?h8Gh\'��P��H��G�\G�P�@��w�\�P�P� ��_�Q���1�h\������Y1��\1���Y������Y"��Y�����?0���\@��`�"���\�����������[� �����Q

@X9� �@�		���
?h8gh\���!��*��P���@��g�\G�P��\����?�P�P�Q������1�h\��Y����1��\1���Y���Y����!��Y��0���\���@�`�!���\���@���	���[
���\� ��

�����	?h8� ��	�h\ ��+��P��H����\G�P�@����\�P�P� ��_	�Q���1h\����7	�Y1��\17��Y�����Y �Y�����?0���\@��`� ���\��������
���[� ����� X\
?h8���
�h\(�����@����\G�P�\����?�P�P
�Q������17h\W
�Y����1�\1W��Y'��Y����(7	�Y	��0��\���@�`�(���\���@����?X8���"������?X9)��� ��������[?h8����'h\	��'�\��G�PG�\�P=���P
�Q���� ��1�h\�
�Y1W�\����1���Y���Y)��Y����
��0W��\@�`�����)���\�����"�����[���@X9� �@����?h87h\����$��
��7�\��G�Pw�\�P=���P��Q���� ��1�h\���Y1��\����1���Y���Y$�
�Y������0���\@�`�����$���\�����"�����[������B��?h8gh\&����H��#g�\G�P�@��%��\#�P%�P� ��_�Q���1�h\������Y 1��\1���Y������Y&��Y�����?0���\@�v`�&���\��������'���[� �@���?h8�h\����%����,��\��#G�P-��\,�P=��-�P%��Q���� ��1�h\%#W��Y(1��\����1W��Y���Y%%��Y����
��0���\@f`�����%���\��������!'X\
��X\����#�X\#G�P((7�\����#�Q��Y-���Y���#��Y���Y
������1���\07��\@Z`��������\��( 7�\ ��?(#G�P���#�Q����.(��Y-.��Y#��Y����.���Y
��1��\����?07��\@�P`����\��D��(!7�\ #G�P���A���-#�Q. ��Y���� .��Y-#��Y .��Y� ��
��1��\07��\���@G`� ���\��� ��("7�\!#G�P�����(#��Q/!���Y!/'��Y���(#�Y!/���Y
������1'��\07��\@>`�����!���\��#$�X\����
��##gX\7#WX\ ��?"7G�P()w�\#7�Q���@�""7�Y("���Y#7���Y����""7�Y
��1���\����?0w��\@�2`�"���\��D��($w�\(7G�P���A���#7�Q.(7�Y����).G��Y#7��Y#.7��Y� ��
��1G��\0w��\���@)`�#���\��� ��(&w�\$7G�P�����)7�Q/$��Y$/g��Y���)7G�Y$/��Y
������1g��\0w��\@ `�����$���\��(%w�\ ��?&7G�P���(7�Q����/&��Y,/W��Y&7���Y����,/g�Y
��1W��\����0w��\@�`������%�?X9&�?X93W�/ ر"�@�5��/ �4&W�/ �2&��/ ����(�?X8)@X8/��/ �� �@�/'��k[1��/ �0&��/ رB�@�.&��/ �'�?X8-'W�/ ��`��6@X874��33h\�����75h\//h\44��Y� ��� 1h\22��Y50��Y����!.�Y -��Y4'��/ ر"�@�7'��/ �.'��/ �-6��/ ر"�@�/6W�/ �06��/ �16��/ ��`��,��W4��34��Y� �@�4'��L24�)857���Y�`��44@�874��.���Y� ��!4'�[--���Y.4'� [�@��32(82G�[74'�([� ��!4'�[#-7h\-�)8� ���!7 �\77�[.7�[�`��3G�[4�� /��Y�"��.��0['�0[3-G�[� ��4-G�[" '��Y50���Y�!�@�1���Y'�\�)8� ��3-G�0[��"5G�Y����!3�\��Y\�@��Ch8	���\@�6� ��?'\ ��L"�6� �!�L"  ��
���@��?G�\G�P w�\����_�P  �P�Q������1h\G�Y����(1��\��Yw��Y����G��Y
��0���\���@�`����\������/*��k[��������
��g�\2��G�P
��\�P]��

�P�Q����' ��1�h\g�Y(1��\������YG��Yg�Y����?
��0���\@�`��������\��������/+��k[�������
��	��\G�P�@�t��\		�P�P� ����Q���1	�h\����w�Y(1��\��Y���g��Yw�Y
�����?0���\@��`����\��������������
����
��\2��G�P�\

�P]���P��Q����' ��1
h\
���Y1�\����
��Yw��Y
���Y����?��0��\@�`��������\����� �����
��	���@��'�\G�PG�\����_�P�P	�Q������1Gh\
��Y����1W�\
��Y	���Y����
��Y��0W��\���@�`����\������	���
����
��7�\
G�P�@�tw�\�P	�P� ����Q���1�h\����
��Y1��\	��Y���
���Y	���Y�����?0���\@��`�	���\��������
������
����g�\2��
G�P��\�P]���P
�Q����' ��1�h\
��Y1��\����
��Y���Y
��Y����?��0���\@|`�����
���\��������
������\2��G�P��\�P]���P��Q����' ��1�h\W��Y1��\������Y���YW��Y����?��0���\@n`��������\���������X\
��
gX\����

wX\
G�P��\����
��Q���Yw��Y���
��Y��Y������?1w��\0���\@b`��������\����\1 ��_
G�P���
��Q�������Yg��Y
w�Y�������Y��1g��\����?0���\@�X`����\�������\
G�P���B���
�Q��Y����G��Y
g�Y��Y� ����1G��\0���\���@O`����\��� ����\
G�P�����
�Q��Y���Y���
G�Y��Y������?1���\0���\@F`��������\��
	�X\����
��
�X\WX\4 ��_
G�P��\��Q���@�

���Y
���Y��Y����

�Y��1���\����?0���\@�:`�
���\�����	��\G�P���B����Q��Y�������Y���Y��Y� ����1���\0���\���@1`����\��� ��
��\	G�P�������Q	���Y	���Y�����Y	���Y������?1���\0���\@(`�����	���\����\1 ��_
G�P����Q����
�Y
W��Y���Y����,�Y��1W��\����0���\@�`���"�@�W�O �&W�O �
'W�O ��"�@�6W�O ���O �&��O ��"�@�'��O �6��O ���O ��"�@�&��O �'��O ���O ر"�@�&��O �'��O �6��O ؿ��_6��O ���L�L�����W4��gh\G��Y�d��gh\74��gh\� ��
w��Y
gh\G�
�Y�$��G��Y���YG�Y����w�
�Yw��Y���Y� � ��h\w��Y��
�Y������Y���Y��Y�@�t��YCh8	���\� ���6 �������,0w�8�A��.,����� ��-1w�8/.��h6/-�������//��"h62���\�@�����21���030���0�272E\�����8@�2'��L�0'���[� �@��4@�1����0�1����6����0����62`��P�1@������1'0FL7����P.@������1����6�0'0EL/����P����)@�//��m[2���\� ��11
���22�
���/.��m[����00
���222
.,��@�P�"��4.\/4G�P����?.4�0Y\3-���-/�Q����03�\//���Y10���Y� ��-.�Y-/���Y33��8� ��..��Y,/���Y23'\����0,w�800'\10��������/1��l6�@�/0��i6����@�/0�m6@����/0���m7,,
@�����2/���Y/0��k[22���� ��1022 3/���Y�@��-/���Y12H\73����[����/1��k[-0�\--�K[����/`���P22�(\-2(8����.���8/.��<//'G\���/-�\,/�G\@���,,,,�'@�����,2��\@�@���007�H,0�'@�����,07�H@�,�����,,W�P@�,1X\���@� ����@�����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@��`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@��`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@�`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@��`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@��`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@��`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@��`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@�}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@r`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@�i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@�``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@X`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@�M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@�D`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@<`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@�3`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �G� �G� ��"�@�
G� �G� ��� �� �@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G�YWh\�@��W4��Wh\7�Y� ��G�Y'��LWh\�$��G��Y
�Y

7��Y�`���)874��G�Y� ���)87��Y
�Y����@�8(8
�h\� ���w�['�[G�[�"��G�['�[
'��Y� ��4����Y7�Y�&���
w�[w� [w�([�"��G�0[W0['�[�"��'�[7��Y�Y�@��?� �\�0[���Y� �@�W�\w�\Ch8�����)8	��\W\�"��?����K���6��� ���	w�8� ���@��
	����w�8����
��h6����� h6���w��\���\@��������0���0�2@\����7@�'��L��<�[� ��3@�����0�����6���������6�0@�'0AL�����!��P�-@�'0AL�����!��P�)@���m[� ��
��m[���\�����������2	���2	����	��"���\ ��G�P
�0Y\��������Q��\��Y�������Y
���Y��Y�������8

���Y	���Y����\	w�8\����������l6@�������i6�@��m6����@����m7		� ��@����Y��k[������� �������Y
���Y�H\� ������[��k[�\� ���K[!��P�(\����(8
��8

��<����

G\
�\	
�G\����@�				�'���@�	��\@����@�7H	�'���@�	7H@�� ��	��		W�P@�����	wX\@� �����@��P�P����P�P�P� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@�,`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@�$`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@
`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@�`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@�`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@�`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@��`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@��`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@�`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@�`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@�`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@}`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@�t`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@l`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@a`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@�X`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@P`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@G`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0��B�@�"��/0�G�/0���/0ز"�@�'G�/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\�`��"'��L�4��

Wh\� ��Wh\G�YG�Y����
"�)8g4��g��Y� ��	g��Y"@�8 �)8�$��	w�Y
w�Y

(8� ���� ['�[
'�[�$��		7�Y%7��Y ��[�`���[G4��Wh\� ����0[�[
 ��[�@��
Wh\G��Y"�([� ��� ��0[
 '�[ '�[�`��G�Y"���\
 ��0[�`��'��\'4��g��Y�$��Wh\Wh\g�
�Y�"��
!��\!!�)8w�Y�$��G��YG��Yw�Y� ��
�\!!�\��Y�%��g��Yg��Y%��Y�@����L	
��6w��Y�@��w�Y	�L7�Y����7��Y��LCh8�@��Ch8	��\	�L=@��_	g��\�6	�6� �� ��	 �������
w�8�@��
����� ��
w�8��h6
��������� h6G��\���\� ��@����0���0�����'2@\�7@�'��L�����'�<�[�3@�����0��������6�����6
1@������'0AL�!��P�-@������'0AL�!��P�)@�������m[��m[���\� ���������2	���2���	
�#������\G�P�0Y\� ��

����Q
��\������Y��Y��Y����'��Y

��8��Y����
\
���Y
w�8����

\
������l6�����@�
��i6�@�����
�m6@�
���m7����

@����Y� ��
��k[���
� �� ���Y���Y�����H\����[��k[����
�\��K[!��P�����(\
(8��8������<�G\
�\����
�G\@�

����

�'@�
��\����@�@�
7H����

�'@�
7H�@�D@�
��

W�P���@�
GX\@��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@��`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@��`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@�`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@��`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@��`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@��`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@��`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@�}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@r`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@�i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@�``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@X`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@�M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@�D`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@<`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@�3`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �G� �G� ��"�@�
G� �G� ��� �� �@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G�YWh\�@��W4��Wh\7�Y� ��G�Y'��LWh\�$��G��Y
�Y

7��Y�`���)874��G�Y� ���)87��Y
�Y����@�8(8
�h\� ���w�['�[G�[�"��G�['�[
'��Y� ��4����Y7�Y�&���
w�[w� [w�([�"��G�0[W0['�[�"��'�[7��Y�Y�@��?� �\�0[���Y� �@�W�\w�\Ch8�����)8	��\W\�"��?����K���6��� ���	w�8� ���@��
	����w�8����
��h6����� h6���w��\���\@��������0���0�2@\����7@�'��L��<�[� ��3@�����0�����6���������6�0@�'0AL�����!��P�-@�'0AL�����!��P�)@���m[� ��
��m[���\�����������2	���2	����	��"���\ ��G�P
�0Y\��������Q��\��Y�������Y
���Y��Y�������8

���Y	���Y����\	w�8\����������l6@�������i6�@��m6����@����m7		� ��@����Y��k[������� �������Y
���Y�H\� ������[��k[�\� ���K[!��P�(\����(8
��8

��<����

G\
�\	
�G\����@�				�'���@�	��\@����@�7H	�'���@�	7H@�� ��	��		W�P@�����	wX\@� �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<0*�L1*7�L0G�P���1�\0��QG��Y����17��Y0g��YG�Y����?@�@�`����\��D<0*�L1*G�L0G�P���1�\0��QG��Y����
1G��Y0���Y
W�Y����?@�@��`�
���\� ����*�\"��1���?*'�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������	
�?X9������[�`��?h8Gh\'��P��H��G�\G�P�@��w�\�P�P� ��_�Q���1�h\������Y1��\1���Y������Y"��Y�����?0���\@��`�"���\�����������[� �����Q

@X9� �@�		���
?h8gh\���!��*��P���@��g�\G�P��\����?�P�P�Q������1�h\��Y����1��\1���Y���Y����!��Y��0���\���@�`�!���\���@���	���[
���\� ��

�����	?h8� ��	�h\ ��+��P��H����\G�P�@����\�P�P� ��_	�Q���1h\����7	�Y1��\17��Y�����Y �Y�����?0���\@��`� ���\��������
���[� ����� X\
?h8���
�h\(�����@����\G�P�\����?�P�P
�Q������17h\W
�Y����1�\1W��Y'��Y����(7	�Y	��0��\���@�`�(���\���@����?X8���"������?X9)��� ��������[?h8����'h\	��'�\��G�PG�\�P=���P
�Q���� ��1�h\�
�Y1W�\����1���Y���Y)��Y����
��0W��\@�`�����)���\�����"�����[���@X9� �@����?h87h\����$��
��7�\��G�Pw�\�P=���P��Q���� ��1�h\���Y1��\����1���Y���Y$�
�Y������0���\@�`�����$���\�����"�����[������B��?h8gh\&����H��#g�\G�P�@��%��\#�P%�P� ��_�Q���1�h\������Y 1��\1���Y������Y&��Y�����?0���\@�v`�&���\��������'���[� �@���?h8�h\����%����,��\��#G�P-��\,�P=��-�P%��Q���� ��1�h\%#W��Y(1��\����1W��Y���Y%%��Y����
��0���\@f`�����%���\��������!'X\
��X\����#�X\#G�P((7�\����#�Q��Y-���Y���#��Y���Y
������1���\07��\@Z`��������\��( 7�\ ��?(#G�P���#�Q����.(��Y-.��Y#��Y����.���Y
��1��\����?07��\@�P`����\��D��(!7�\ #G�P���A���-#�Q. ��Y���� .��Y-#��Y .��Y� ��
��1��\07��\���@G`� ���\��� ��("7�\!#G�P�����(#��Q/!���Y!/'��Y���(#�Y!/���Y
������1'��\07��\@>`�����!���\��#$�X\����
��##gX\7#WX\ ��?"7G�P()w�\#7�Q���@�""7�Y("���Y#7���Y����""7�Y
��1���\����?0w��\@�2`�"���\��D��($w�\(7G�P���A���#7�Q.(7�Y����).G��Y#7��Y#.7��Y� ��
��1G��\0w��\���@)`�#���\��� ��(&w�\$7G�P�����)7�Q/$��Y$/g��Y���)7G�Y$/��Y
������1g��\0w��\@ `�����$���\��(%w�\ ��?&7G�P���(7�Q����/&��Y,/W��Y&7���Y����,/g�Y
��1W��\����0w��\@�`������%�?X9&�?X93W�/ ر"�@�5��/ �4&W�/ �2&��/ ����(�?X8)@X8/��/ �� �@�/'��k[1��/ �0&��/ رB�@�.&��/ �'�?X8-'W�/ ��`��6@X874��33h\�����75h\//h\44��Y� ��� 1h\22��Y50��Y����!.�Y -��Y4'��/ ر"�@�7'��/ �.'��/ �-6��/ ر"�@�/6W�/ �06��/ �16��/ ��`��,��W4��34��Y� �@�4'��L24�)857���Y�`��44@�874��.���Y� ��!4'�[--���Y.4'� [�@��32(82G�[74'�([� ��!4'�[#-7h\-�)8� ���!7 �\77�[.7�[�`��3G�[4�� /��Y�"��.��0['�0[3-G�[� ��4-G�[" '��Y50���Y�!�@�1���Y'�\�)8� ��3-G�0[��"5G�Y����!3�\��Y\�@��Ch8	���\@�6� ��?'\ ��L"�6� �!�L"  ��
���@��?G�\G�P w�\����_�P  �P�Q������1h\G�Y����(1��\��Yw��Y����G��Y
��0���\���@�`����\������/*��k[��������
��g�\2��G�P
��\�P]��

�P�Q����' ��1�h\g�Y(1��\������YG��Yg�Y����?
��0���\@�`��������\��������/+��k[�������
��	��\G�P�@�t��\		�P�P� ����Q���1	�h\����w�Y(1��\��Y���g��Yw�Y
�����?0���\@��`����\��������������
����
��\2��G�P�\

�P]���P��Q����' ��1
h\
���Y1�\����
��Yw��Y
���Y����?��0��\@�`��������\����� �����
��	���@��'�\G�PG�\����_�P�P	�Q������1Gh\
��Y����1W�\
��Y	���Y����
��Y��0W��\���@�`����\������	���
����
��7�\
G�P�@�tw�\�P	�P� ����Q���1�h\����
��Y1��\	��Y���
���Y	���Y�����?0���\@��`�	���\��������
������
����g�\2��
G�P��\�P]���P
�Q����' ��1�h\
��Y1��\����
��Y���Y
��Y����?��0���\@|`�����
���\��������
������\2��G�P��\�P]���P��Q����' ��1�h\W��Y1��\������Y���YW��Y����?��0���\@n`��������\���������X\
��
gX\����

wX\
G�P��\����
��Q���Yw��Y���
��Y��Y������?1w��\0���\@b`��������\����\1 ��_
G�P���
��Q�������Yg��Y
w�Y�������Y��1g��\����?0���\@�X`����\�������\
G�P���B���
�Q��Y����G��Y
g�Y��Y� ����1G��\0���\���@O`����\��� ����\
G�P�����
�Q��Y���Y���
G�Y��Y������?1���\0���\@F`��������\��
	�X\����
��
�X\WX\4 ��_
G�P��\��Q���@�

���Y
���Y��Y����

�Y��1���\����?0���\@�:`�
���\�����	��\G�P���B����Q��Y�������Y���Y��Y� ����1���\0���\���@1`����\��� ��
��\	G�P�������Q	���Y	���Y�����Y	���Y������?1���\0���\@(`�����	���\����\1 ��_
G�P����Q����
�Y
W��Y���Y����,�Y��1W��\����0���\@�`���"�@�W�O �&W�O �
'W�O ��"�@�6W�O ���O �&��O ��"�@�'��O �6��O ���O ��"�@�&��O �'��O ���O ر"�@�&��O �'��O �6��O ؿ��_6��O ���L�L�����W4��gh\G��Y�d��gh\74��gh\� ��
w��Y
gh\G�
�Y�$��G��Y���YG�Y����w�
�Yw��Y���Y� � ��h\w��Y��
�Y������Y���Y��Y�@�t��YCh8	���\� ���6 �������,0w�8�A��.,����� ��-1w�8/.��h6/-�������//��"h62���\�@�����21���030���0�272E\�����8@�2'��L�0'���[� �@��4@�1����0�1����6����0����62`��P�1@������1'0FL7����P.@������1����6�0'0EL/����P����)@�//��m[2���\� ��11
���22�
���/.��m[����00
���222
.,��@�P�"��4.\/4G�P����?.4�0Y\3-���-/�Q����03�\//���Y10���Y� ��-.�Y-/���Y33��8� ��..��Y,/���Y23'\����0,w�800'\10��������/1��l6�@�/0��i6����@�/0�m6@����/0���m7,,
@�����2/���Y/0��k[22���� ��1022 3/���Y�@��-/���Y12H\73����[����/1��k[-0�\--�K[����/`���P22�(\-2(8����.���8/.��<//'G\���/-�\,/�G\@���,,,,�'@�����,2��\@�@���007�H,0�'@�����,07�H@�,�����,,W�P@�,1X\���@� ����@�����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@��`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@��`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@�`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@��`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@��`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@��`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@��`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@�}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@r`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@�i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@�``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@X`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@�M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@�D`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@<`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@�3`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �G� �G� ��"�@�
G� �G� ��� �� �@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G�YWh\�@��W4��Wh\7�Y� ��G�Y'��LWh\�$��G��Y
�Y

7��Y�`���)874��G�Y� ���)87��Y
�Y����@�8(8
�h\� ���w�['�[G�[�"��G�['�[
'��Y� ��4����Y7�Y�&���
w�[w� [w�([�"��G�0[W0['�[�"��'�[7��Y�Y�@��?� �\�0[���Y� �@�W�\w�\Ch8�����)8	��\W\�"��?����K���6��� ���	w�8� ���@��
	����w�8����
��h6����� h6���w��\���\@��������0���0�2@\����7@�'��L��<�[� ��3@�����0�����6���������6�0@�'0AL�����!��P�-@�'0AL�����!��P�)@���m[� ��
��m[���\�����������2	���2	����	��"���\ ��G�P
�0Y\��������Q��\��Y�������Y
���Y��Y�������8

���Y	���Y����\	w�8\����������l6@�������i6�@��m6����@����m7		� ��@����Y��k[������� �������Y
���Y�H\� ������[��k[�\� ���K[!��P�(\����(8
��8

��<����

G\
�\	
�G\����@�				�'���@�	��\@����@�7H	�'���@�	7H@�� ��	��		W�P@�����	wX\@� �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NG�0[7�O�GB��7�N�mK7�0[����cK�P���D<E*�LF*7�LEG�P���FW�\E��QG��Y����FG��YE7#�YW��Y����?@�@�`����\��D<E*�LF*G�LEG�P���FW�\E�QW�Y����FW��YE�#�YG��Y����?@�@��`����\� ����*'�\��1���?*�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������
�?X9������[�`��?h8	Gh\&��P��H��G�\	G�P�@����\�P
�P� ��_	�Q���F�h\������YF��\F���Y���
	g#�Y��Y�����?E���\@��`����\�����������[� ����
@X9��Q� �@�
���?h8wh\���
��=��P���@��w�\G�P��\����?
�P�P��Q������F
�h\��Y����F��\
F��Y�#�Y����
���Y��E���\���@�`�
���\���@���
���[
���\� �������
?h8� ��
�h\��>��P��H����\G�P�@���\�P�P� ��_
�Q���F7h\����W
�YF�\FW��Y���'#�Y7	�Y�����?E��\@��`����\�����������[� ����� X\?h8����h\�����@����\G�P7�\����?�P�P��Q������Fgh\���Y����FG�\F���YW#�Y����g�
�Y	��EG��\���@�`����\���@����?X8���"������?X9��� ��������[?h8����gh\	�� g�\��G�P!��\ �P=��!�P�Q���� ��F�h\��YF��\����F���Y�#�Y��Y����
��E���\@�`��������\�����"�����[���@X9� �@����?h8wh\������
��#w�\��"G�P$��\#�P=�� $�P!�Q���� ��Fh\!"�YF��\����F��Y �#�Y!�Y������E���\@�`��������\�����"�����[������B�� ?h8!�h\"����H��'��\$!G�P�@��(�\"'�P#(�P� ��_%!�Q���F"7h\����%$W�Y F�\"FW��Y���#!'#�Y"%7�Y�����?E��\@�u`�"���\��������'���[� �@���#?h8$�h\����%����*��\��)$G�P+7�\%*�P=��'+�P($��Q���� ��F%wh\()���Y(FG�\����%F���Y'$W#�Y%(w��Y����
��EG��\@e`�����%���\����� @�/&��k[+��/ �'�?X9��@�&�?X9(�?X8,'g�/ ر"@�-g�/ �/(g�/ �)@X8�"@�*�?X80)g�/ �1'��/ ر"@�2(��/ �3)��/ �4'��/ ر"@�5��/ �6(��/ �7)��/ �� @�.@X88'��/ �9��/ ر"��:(��/ �;)��/ ������<��
��<G�\2��@	G�P��\<<�P]��?�PA	 �Q���' ��F<�h\A@ �Y(F��\����Ag��Y?	G#�Y?A��Y����?
��E���\@G`�����<���\������/=��k[��	�����
��w�\	G�P�@�t��\�P�P� �����Q���F�h\����		G��Y(F��\	g��Y���w#�Y		G��Y
����!�?E���\@�7`�	���\��������/>��k[�������
���@��
��\G�P�\����_

�P�P�Q������F
h\=w�Y����(F�\=g��YG#�Y����=w�Y
��E��\���@(`����\���������
��������\G�P�@�t7�\�P�P� ���
��Q���F�h\�������YFG�\g��Y���w#�Y���Y����!�?EG��\@�`����\��������������
��	��g�\2��G�P��\�P]���P
�Q����' ��F�h\��YF��\����g��Y
�#�Y��Y����?��E���\@`��������\����� ��
���
��
���@��w�\
G�P��\����_�P�P�Q������F�h\
��Y����F��\
g��Y�#�Y����
��Y��E���\���@�`�
���\���������
��������\!G�P�@�t �\�P  �P� ���!��Q���Fh\�������YF�\g��Y���!�#�Y���Y����!�?E��\@��`����\���������
��������\$G�P�@�t#7�\�P#�P� ���$��Q���FWh\�������YFG�\g��Y���$W#�Y���Y����!�?EG��\@��`����\���������X\����
��'X\WX\4 ��_G�P��\�Q���@��Y���Yg�Y������Y��F���\���!�?E���\@��`����\�����
gX\
��@��'X\WX\G�P�����\��Q7��Y���g��Y�YG��Y� ����Fg��\E��\���@�`����\��� ��
�\G�P������Q�Y���Y���g��Y�Y������?F���\E��\@�`��������\���\� ���4��
G�P--gh\� ��_��,,7��Y��Q���@�
G��Y
'��Y�	�Y����
G��Y��F'��\���!�?E��\@��`�
���\�������\�4��1��G�P//��Y�������Q'��YW��Y���'�
�Y	�Y������?FW��\E��\@�`��������\����\� ���4��G�P/0��Y���B�����Q?'�Y����?���Y�Y??'�Y� ����F���\E���\����@�`���+gh\1���G�P"��\�4�������
��17�Y2��Y����	�Q�4��3�Y� ��G	�Y�h\'��Y����/�	�YG�Y?W
�Y� ����F'��\E���\����@�`���g4��� ��55gh\G�P%��\���
��447��YG4�����B�46��Y�Q'�Y� ��?7�YW��Y��Y�����Y?W�Y������FW��\E���\@�`�����	�X\'4���@�����99gh\GX\����887��YwX\8:��Y2���G�Pg�\;�Y������Y
��Q�w����
���Y	���\
w��Y�����Y

��Y������Fw��\Eg��\@s`�����?
���\��g�\ ��?G�P�����Q������YG��Yw�Y������Y��FG��\����?Eg��\@�i`����\��D��	g�\G�P���A����Q��Y�������YG��Y��Y� ����F���\Eg��\���@``����\��� ��<g�\	G�P�������Q	���Y	���Y�����Y	���Y������F���\Eg��\@W`�����	���\��
�X\����
���X\WX\ ��?G�P��\�Q���@��Y���Y�Y������Y��F���\����?E���\@�K`����\��D��
��\G�P���A����Q�Y�������Y��Y�Y� ����F���\E���\���@B`����\��� ����\
G�P������Q
�Y
���Y������Y
�Y������F���\E���\@9`�����
���\����\ ��?G�P����Q������YW��Y?��Y����?��Y��FW��\����E���\@�/`���"�@�g�O �'g�O �(g�O ��"�@�)g�O ���O �'��O ر">@�(��O �)��O ���O ��"�@�
'��O �(��O ���O ر"�@�'��O �)��O �(��O رd��)��O ��4��Gh\����	�Yw4��w	�Y� ��Gh\��Y'��L�����Y�)8G4��� ���w�YGh\Gh\� ��'(8�
�Y��Y�&��W�[W�['��8����h\w�Y4��� ��	��Y�0[�[�@��g�Y��Yw��Y� ����[� [�([�"���)8
���Y��Y� ���W�[W�[g �\�&��?W�[W�[
��Y� ��0[W0[

�w�"���\
	���\�)8� ��g��\7\���K�@��
6�� �����?Ew�8�A������A?����@Fw�8/A��h6����B@����/B��"h6C���\� ��
@�CF���0DE���0�����CG2E\�8@�C'��L�����E7�<#�[�4@�F����0�����F����6�E����6C`��P����
1@�F'0FL7����P� ��.@�F����6�E'0EL����/����P�)@�/B��m[� ��C���\FF
���2C�
���� ��/A��m[EE
���2CC
����B?��"��EBW\ ��GEG�PAE�0Y\D@���� ��BG� �Q@G'�#�YGDg�\����EG��YBAW�#�YB@'�"�Y� ��DD���8AA'�#�YDD7\����?@!�YC?w�8CCG\����EC����/E��l6
@�����/C��i6�@�/C�m6����@�/C���m7??� ��
@�E@!�Y/C��k[����EE���DCEE ����F@!�Y@@!�YDEGH\� ��7F���[/D��k[@C�\� ��@@��!K[/`���PEE(\����@E(8A���8BA �<����BBWG\B@'\?B�G\����@�????�'���@�?D��\@����@�EE7#H?E�'���@�?E7#H@�� ��?��??W�P@�����?FWX\@� �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�q`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�W`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@D`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�;`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@3`�@�@����?X9�?X9��@�� ��?X8� ��@�@�@X8� ��?X8�"�@�� �G� �G� ��"�@�
G� �G� ��� �� �@�@X8�� ��� رB�@��� ��� ��� ر"����� ��� ��4��� ���Wh\G�YWh\�@��W4��Wh\7�Y� ��G�Y'��LWh\�$��G��Y
�Y

7��Y�`���)874��G�Y� ���)87��Y
�Y����@�8(8
�h\� ���w�['�[G�[�"��G�['�[
'��Y� ��4����Y7�Y�&���
w�[w� [w�([�"��G�0[W0['�[�"��'�[7��Y�Y�@��?� �\�0[���Y� �@�W�\w�\�)8�`���w	��\W\�B��?����K�� �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@�(`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@� `����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@	`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@�`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@�`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@�`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@��`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@��`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@�`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@�`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@�`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@y`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@�p`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@h`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@]`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@�T`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@L`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@C`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0ر"�@�G�/0�"��/0�'G�/0ر"�@���/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\� ����4��

Wh\Wh\�$��G�YG�Y'��L� ���g4��
g��Y	g��Y�`���)8Wh\	w�Y� ��
w�Y
'(8
Wh\� ��G4��G��Y �)8� ��		7�Y%7��Y ��[� ��
 ��['@�8G�Y�@��
g��YWh\ �0[� ���
�[ 7�[ 7�[� ��'4��G��Yg�Y�&����[�([
Wh\� ��� [  �0[7�[� ���7�[
w�YG��Y�!��
g��Yw�Y��\� ����0[

��Yg��Y� ��
w��Y%��Y!�\� ��W�\w�Y

7�Y� ��!!�)87��Y

�w� ��!!'\
	���\���K�@���w	g��\��� ��6 �������
w�8�@��
����� ��
w�8��h6
��������� h6G��\���\� ��@����0���0�����'2@\�7@�'��L�����'�<�[�3@�����0��������6�����6
1@������'0AL�!��P�-@������'0AL�!��P�)@�������m[��m[���\� ���������2	���2���	
�#������\G�P�0Y\� ��

����Q
��\������Y��Y��Y����'��Y

��8��Y����
\
���Y
w�8����

\
������l6�����@�
��i6�@�����
�m6@�
���m7����

@����Y� ��
��k[���
� �� ���Y���Y�����H\����[��k[����
�\��K[!��P�����(\
(8��8������<�G\
�\����
�G\@�

����

�'@�
��\����@�@�
7H����

�'@�
7H�@�D@�
��

W�P���@�
GX\@��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@}`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�q`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@i`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@``����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�W`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@M`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@D`�

@X9� �@�		���
?h8gh\���!��(��P���@��g�\G�P��\����?�P�P�Q������.�h\��Y����.��\.���Y��Y����!��Y��/���\���@��`�!���\���@���	���[
���\� ��

�����	?h8� ��	�h\ ��)��P��H����\G�P�@����\�P�P� ��_	�Q���.h\����7	�Y.��\.7��Y����Y �Y�����?/���\@�`� ���\��������
���[� ����� X\
?h8���
�h\4�����@����\G�P�\����?�P�P
�Q�������.7h\W
�Y����.�\.W��Y'�Y����*7	�Y	��/��\����?@��`���4���\��@����?X8���"������?X90��� ��������[?h8����'h\	��'�\��G�PG�\�P=���P
�Q��� ��.�h\�
�Y.W�\����.���Y��Y*��Y����
��/W��\@��`�����0���\���"�����[���@X9� �@����?h87h\����#��
��7�\��G�Pw�\�P=���P��Q���� ��.�h\���Y.��\����.���Y��Y#�
�Y������/���\@�`�����#���\�����"�����[������B��?h8gh\%����H��$g�\G�P�@��'��\$�P'�P� ��_�Q���.�h\������Y .��\.���Y�����Y%��Y�����?/���\@n`�%���\��������'���[� �@���?h8�h\����$����*��\��$G�P��\*�P=���P'�Q���� ��.�h\'$w�Y(.��\����.w��Y$��Y$'G�Y����
��/���\@�]`�����$���\��������!'X\
��X\����'GX\'G�P(4w�\����'�Q��Y-G��Y���'��Y���Y
������.G��\/w��\@�Q`��������\��( w�\ ��?*'G�P���'�Q����-*��Y*-��Y'��Y����-��Y
��.��\����?/w��\@H`����\��D��(!w�\ 'G�P���A���*'�Q+ ��Y���� +��Y*'��Y +��Y� ��
��.��\/w��\���@�>`� ���\��� ��("w�\!'G�P�����*'��Q+!���Y!+'��Y���*'�Y!+���Y
������.'��\/w��\@�5`�����!���\��"#X\����
��""WX\6"GX\ ��?'6G�P(0g�\"6��Q���@�"''��Y'"��Y,6w�Y����""���Y
��.��\����?/g��\@*`�"���\��D��(#g�\'6G�P���A���*6��Q*'���Y����0*7��Y'6��Y0*w�Y� ��
��.7��\/g��\���@� `�0���\��� ��(%g�\#6G�P�����*6��Q,#���Y#,W��Y���*67��Y#,���Y
������.W��\/g��\@�`�����#���\��($g�\ ��?%6G�P���'6��Q����'%w��Y%'G��Y*6W�Y����*'���Y
��.G��\����/g��\@`���B�@�1��/ �%�?X9/&��k[�B�@�+%��/ �&�?X8'@X8���2&��/ �$�?X93'��/ ر"�@�/G�/ �.%G�/ �-&G�/ سd��,'G�/ �g4��41h\����4+�Y74��42��Y� ��+3��Y//h\4@X8�@��++h\0�?X86�/ ��"�@�3%�/ �2&�/ �1G�/ ����74��..��Y/%G�/ �����--��Y74��5,���Y�"�@�,'�/ �.&G�/ �-'G�/ �� ��!��"5'��Y+'�O� ���'4��66h\ 1h\�$��53�Y /�Y/'�N�B���!'�N52���Y1'�)N�!�� .��Y,,���Y'�N�"��.'�?N��0[ -��Y� ��#,7�Y��1� �\�@��?�\�)8  ���Y�@��\ �w ��L ��?	���\!�L"�(8����"  ��
��G�\2��G�P w�\�P��  �P�Q���� ��.h\!G�Y(.��\����!���Yw�Y!G��Y����
��/���\@��`��������\��������/(��k[�������
��g�\G�P�@�t
��\�P

�P� ����Q���.�h\����g�Y(.��\���Y���G�Yg�Y
�����?/���\@�`����\��������/)��k[�������
���@��	��\G�P��\����_		�P�P�Q������.	�h\w�Y����(.��\���Yg�Y����w�Y
��/���\���@��`����\���������
������
��\G�P�@�t�\

�P�P� �����Q���.
h\����	���Y.�\	���Y���w�Y	���Y�����?/��\@�`����\��������������
��	��'�\2��G�PG�\�P]���P	�Q����' ��.Gh\��Y.W�\�������Y	��Y��Y����?��/W��\@��`��������\����� ��	���
��
���@��7�\
G�Pw�\����_�P	�P�Q������.�h\
��Y����.��\	���Y
��Y����	���Y��/���\���@��`�	���\������
���
������g�\
G�P�@�t��\�P�P� ���
�Q���.�h\����
��Y.��\

���Y�����Y

��Y�����?/���\@|`�
���\���������
��������\G�P�@�t��\�P�P� �����Q���.�h\����W��Y.��\���Y�����YW��Y�����?/���\@n`����\���������X\����
��
gX\

wX\4 ��_
G�P��\
��Q���@����Yw��Y
��Y������Y��.w��\����?/���\@b`����\�������\
G�P���B���
��Q���Y����g��Y
w�Y���Y� ����.g��\/���\���@�X`����\��� ����\
G�P�����
�Q��YG��Y���
g�Y��Y������?.G��\/���\@�O`��������\����\1 ��_
G�P���
�Q������Y���Y
G�Y������Y��.���\����?/���\@F`����\�����
	�X\
��@��
�X\WX\
G�P������\��Q

���Y���
���Y��Y

�Y� ����.���\/���\���@�:`�
���\��� ��	��\G�P������Q��Y���Y������Y��Y������?.���\/���\@�1`��������\��
��\1 ��_	G�P�����Q����	���Y	���Y��Y����	���Y��.���\����?/���\@(`�	���\�������\
G�P���B�����Q
�Y����
W��Y���Y*�Y� ����.W��\/���\����@�`���G�O ر"�@�%G�O �
&G�O �'G�O ��"�@���O �%��O �&��O ��"�@�'��O ��O �%�O ��"6@�&�O �G�O �%G�O ر"�@�&G�O �'�O �'G�O �������L�LW4��$��gh\G��Ygh\�@��74��gh\
w��Y�@���
gh\G�
�YG��Y� �����YG�Yw�
�Y����w��Y���Y�h\����w��Y��
�Y��Y����?���Y��Y��Y����?�w	���\�(8��� ���*/w�8� ��A��,*����+.w�8����/,��h6-+����/-��"h6����1���\�@�1.���0����2/���0�1'2E\9@����1'��L�/�<�[
5@��"���.����0�.����6�/����6����1`��P�1@�.'0FL����7����P.@�.����6�����/'0EL/����P�)@�� ��/-��m[1���\..
���2����1�
���/,��m[//
���2���11
,*�#������3,�\-3G�P,3�0Y\� ��/+���+-�Q./��\����--���Y2.���Y+,'�Y����+-��Y//��8,,��Y����*-���Y1/\.*w�8����..\/.����//��l6�����@�/.��i6�@�����/.�m6@�/.���m7����**
@�1-���Y� ��/.��k[11���/.� ��11 2-���Y+-���Y����/1�H\72����[//��k[����+.�\++�K[/`���P����11�(\+1(8,���8����,,��<,,G\,+�\����*,�G\@�**����**�'@�*1��\����@�@�./7H����*.�'@�*/7H�@�D@�*��**W�P���@�*.�X\@��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@u`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�i`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@a`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@X`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�O`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@E`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@<`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�3`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@+`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��L�w	��\�@���L�(8 �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@�#`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@�`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@�`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@�`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@�`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@��`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@��`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@�`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@�`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@}`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@t`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@�k`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@c`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@X`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@�O`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@G`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@>`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0��"�@�"��/0�G�/0�'G�/0��"�@���/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\� ���w4��

Wh\Wh\�$��G�YG�Y
 '�N�b�� '�?NG4��g��Y� ��	g��Y	Wh\ '�N�$��w�Y
w�YG��Y�"�@� '�)N
 '�O '�N�$��7�Y%7��YWh\� ��74��g�
�Y
 �0[������ �\G��Y	g��Y���Wh\Wh\4��� ��	w��YG��YG��Y� ��w�Y
�\!!�)8� ��	�Yg��Yg��Y� ��%��Y!\��L�%��w��Yw�Y!�L����7�Y7��Y�w�@���w	��\��L6 ��?	g��\!�L�(8�@��	�(8 ��	 �����
w�8A������
����
w�8��h6� ��
����� h6G��\�������\@����0�������0�'2@\�7@����'��L�'�<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[��m[� �����\�������2����	���2	
��@�P�"��\G�P����?�0Y\

����Q����
��\��Y��Y� ����Y'��Y

��8������Y
\
���Y����
w�8

\
����������l6�@�
��i6����@�
�m6@����
���m7

@��������Y
��k[���� ��
 ���Y�@�����Y�H\����[������k[
�\��K[����!��P�(\
(8������8��<�G\���
�\
�G\@���



�'@�����
��\@�@���
7H

�'@�����
7H@�
�����

W�P@�
GX\���@� ����@� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@u`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�i`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@a`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@X`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�O`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@E`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@<`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�3`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@+`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��L�w	��\�@���L�(8 �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@�#`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@�`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@�`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@�`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@�`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@��`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@��`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@�`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@�`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@}`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@t`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@�k`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@c`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@X`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@�O`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@G`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@>`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0��"�@�"��/0�G�/0�'G�/0��"�@���/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\� ���w4��

Wh\Wh\�$��G�YG�Y
 '�N�b�� '�?NG4��g��Y� ��	g��Y	Wh\ '�N�$��w�Y
w�YG��Y�"�@� '�)N
 '�O '�N�$��7�Y%7��YWh\� ��74��g�
�Y
 �0[������ �\G��Y	g��Y���Wh\Wh\4��� ��	w��YG��YG��Y� ��w�Y
�\!!�)8� ��	�Yg��Yg��Y� ��%��Y!\��L�%��w��Yw�Y!�L����7�Y7��Y�w�@���w	��\��L6 ��?	g��\!�L�(8�@��	�(8 ��	 �����
w�8A������
����
w�8��h6� ��
����� h6G��\�������\@����0�������0�'2@\�7@����'��L�'�<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[��m[� �����\�������2����	���2	
��@�P�"��\G�P����?�0Y\

����Q����
��\��Y��Y� ����Y'��Y

��8������Y
\
���Y����
w�8

\
����������l6�@�
��i6����@�
�m6@����
���m7

@��������Y
��k[���� ��
 ���Y�@�����Y�H\����[������k[
�\��K[����!��P�(\
(8������8��<�G\���
�\
�G\@���



�'@�����
��\@�@���
7H

�'@�����
7H@�
�����

W�P@�
GX\���@� ����@� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@u`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�i`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@a`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@X`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�O`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@E`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@<`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�3`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@+`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��L�w	��\�@���L�(8 �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D</*�L.*7�L/G�P���.��\/�Q7�Y����.7��Y/G�YW�Y����?@�@��`����\��D</*�L.*G�L/G�P���.��\/�QW�Y����.W��Y
/G�Y
��Y����?@�@��`�
���\� ����*�\"��1���?*'�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������	
�?X9������[�`��?h8Gh\&��P��H��G�\G�P�@��w�\�P�P� ��_�Q���.�h\������Y.��\.���Y�����Y"��Y�����?/���\@��`�"���\�����������[� �����Q

@X9� �@�		���
?h8gh\���!��(��P���@��g�\G�P��\����?�P�P�Q������.�h\��Y����.��\.���Y��Y����!��Y��/���\���@�`�!���\���@���	���[
���\� ��

�����	?h8� ��	�h\ ��)��P��H����\G�P�@����\�P�P� ��_	�Q���.h\����7	�Y.��\.7��Y����Y �Y�����?/���\@��`� ���\��������
���[� ����� X\
?h8���
�h\4�����@����\G�P�\����?�P�P
�Q�������.7h\W
�Y����.�\.W��Y'�Y����*7	�Y	��/��\����?@�`���4���\��@����?X8���"������?X90��� ��������[?h8����'h\	��'�\��G�PG�\�P=���P
�Q��� ��.�h\�
�Y.W�\����.���Y��Y*��Y����
��/W��\@�`�����0���\���"�����[���@X9� �@����?h87h\����#��
��7�\��G�Pw�\�P=���P��Q���� ��.�h\���Y.��\����.���Y��Y#�
�Y������/���\@~`�����#���\�����"�����[������B��?h8gh\%����H��$g�\G�P�@��'��\$�P'�P� ��_�Q���.�h\������Y .��\.���Y�����Y%��Y�����?/���\@�l`�%���\��������'���[� �@���?h8�h\����$����*��\��$G�P��\*�P=���P'�Q���� ��.�h\'$w�Y(.��\����.w��Y$��Y$'G�Y����
��/���\@\`�����$���\��������!'X\
��X\����'GX\'G�P(4w�\����'�Q��Y-G��Y���'��Y���Y
������.G��\/w��\@P`��������\��( w�\ ��?*'G�P���'�Q����-*��Y*-��Y'��Y����-��Y
��.��\����?/w��\@�F`����\��D��(!w�\ 'G�P���A���*'�Q+ ��Y���� +��Y*'��Y +��Y� ��
��.��\/w��\���@=`� ���\��� ��("w�\!'G�P�����*'��Q+!���Y!+'��Y���*'�Y!+���Y
������.'��\/w��\@4`�����!���\��"#X\����
��""WX\6"GX\ ��?'6G�P(0g�\"6��Q���@�"''��Y'"��Y,6w�Y����""���Y
��.��\����?/g��\@�(`�"���\��D��(#g�\'6G�P���A���*6��Q*'���Y����0*7��Y'6��Y0*w�Y� ��
��.7��\/g��\���@`�0���\��� ��(%g�\#6G�P�����*6��Q,#���Y#,W��Y���*67��Y#,���Y
������.W��\/g��\@`�����#���\��($g�\ ��?%6G�P���'6��Q����'%w��Y%'G��Y*6W�Y����*'���Y
��.G��\����/g��\@�`���B�@�1��/ �%�?X9/&��k[�B�@�+%��/ �&�?X8'@X8���2&��/ �$�?X93'��/ ر"�@�/G�/ �.%G�/ �-&G�/ سd��,'G�/ �g4��41h\����4+�Y74��42��Y� ��+3��Y//h\4@X8�@��++h\0�?X86�/ ��"�@�3%�/ �2&�/ �1G�/ ����74��..��Y/%G�/ �����--��Y74��5,���Y�"�@�,'�/ �.&G�/ �-'G�/ �� ��� ��"5'��Y+'�O� ���'4��66h\ 1h\�$��53�Y /�Y/'�N�B���!'�N52���Y1'�)N�!�� .��Y,,���Y'�N�"��.'�?N��0[ -��Y� ��#,7�Y��1� �\�@��?�\�)8  ���Y�@��\ Ch8 ��L@��#	���\!�L  ���H
��G�\G�P�@�� w�\�P  �P� ��_�Q���.h\����!G�Y(.��\!���Y���w�Y!G��Y
�����?/���\@��`����\��������/(��k[�������
���@��g�\G�P
��\����_�P

�P�Q����?��.�h\g�Y����(.��\���YG�Y����g�Y
��/���\���@�`����\������/)��k[��������
��	��\2��G�P��\		�P]���P�Q����' ��.	�h\w�Y(.��\�������Yg�Yw�Y����?
��/���\@�`��������\����� �����
�����@��
��\G�P�\����_

�P�P��Q������.
h\	���Y����.�\	���Yw�Y����	���Y��/��\���@�`����\���������
����	��'�\G�P�@�tG�\�P�P� ���	�Q���.Gh\������Y.W�\���Y���	��Y��Y�����?/W��\@��`����\��������	������
��
��7�\2��
G�Pw�\�P]��	�P�Q����' ��.�h\
��Y.��\����	���Y
��Y	���Y����?��/���\@�`�����	���\����� ��
���
�����@��g�\
G�P��\����_�P�P
�Q������.�h\
��Y����.��\

���Y��Y����

��Y��/���\���@{`�
���\�� �����
�����@����\G�P��\����_�P�P��Q������.�h\W��Y����.��\���Y��Y����W��Y��/���\���@m`����\�������X\
��@��
gX\

wX\
G�P������\
��Q���Y���w��Y
��Y��Y� ����.w��\/���\���@a`����\��� ����\
G�P�����
��Q���Yg��Y���
w�Y���Y������?.g��\/���\@X`��������\����\1 ��_
G�P���
�Q������YG��Y
g�Y������Y��.G��\����?/���\@�N`����\�������\
G�P���B���
�Q��Y�������Y
G�Y��Y� ����.���\/���\���@E`����\������
	�X\
��
�X\����WX\
G�P��\������Q

���Y
���Y�����Y

�Y������?.���\/���\@:`�����
���\��	��\1 ��_G�P����Q������Y���Y���Y������Y��.���\����?/���\@�0`����\�����
��\	G�P���B�����Q	���Y����	���Y��Y	���Y� ����.���\/���\���@'`�	���\��� ����\
G�P�����Q
�Y
W��Y������Y*�Y������?.W��\/���\@`� �@���G�O �%G�O ��"�@�
&G�O �'G�O ���O ��"�@�%��O �&��O �'��O ر"�@��O �%�O �&�O ر"�@�G�O �%G�O �&G�O ر��'�O �'G�O ���L�`���LW4��gh\����G��Ygh\74��� ��gh\
w��Y
gh\�D���G�
�YG��Y���Y�����G�Yw�
�Yw��Y�$�����Y�h\w��Y� ����
�Y��Y���Y������Y��YCh8 ��	���\ �������*/w�8�A��,*����� ��+.w�8/,��h6-+�������/-��"h61���\�@�����1.���02/���0�1'2E\�����8@�1'��L�/�<�[� �@��4@�.����0�.����6����/����61`��P�1@������.'0FL7����P.@������.����6�/'0EL/����P����)@�/-��m[1���\� ��..
���21�
���/,��m[����//
���211
,*��@�P�"��3,�\-3G�P����?,3�0Y\/+���+-�Q����./��\--���Y2.���Y� ��+,'�Y+-��Y//��8� ��,,��Y*-���Y1/\����.*w�8..\/.��������//��l6�@�/.��i6����@�/.�m6@����/.���m7**
@�����1-���Y/.��k[11���� ��/.11 2-���Y�@��+-���Y/1�H\72����[����//��k[+.�\++�K[����/`���P11�(\+1(8����,���8,,��<,,G\���,+�\*,�G\@���****�'@�����*1��\@�@���./7H*.�'@�����*/7H@�*�����**W�P@�*.�X\���@� ����@� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�
`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@t`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�h`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@``��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@W`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�N`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@D`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@;`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�2`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@*`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��LCh8	��\� ���L �������	w�8�@��
	����� ��w�8
��h6��������� h6w��\���\� ��@����0���0�����2@\�7@�'��L������<�[�3@�����0��������6�����6
1@������'0AL�!��P�-@������'0AL�!��P�)@�������m[
��m[���\� ���������2	���2���		�#�������\G�P
�0Y\� ������Q��\������Y���Y
���Y������Y���8

���Y����	���Y\	w�8����\������l6�����@���i6�@������m6@����m7����		@����Y� ����k[���� �� ���Y
���Y�����H\����[��k[�����\�K[!��P�����(\(8
��8����

��<

G\
�\����	
�G\@�		����		�'@�	��\����@�@�7H����	�'@�	7H�@�D@�	��		W�P���@�	wX\@��� ����@��P� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@"`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@�`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@��`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@��`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@��`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@�`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@�`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@��`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@��`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@�{`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@�r`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@j`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@�a`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@�V`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@N`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@�E`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@�<`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0��"�@�"��/0�G�/0�'G�/0��"�@���/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\� ���w4��

Wh\Wh\�$��G�YG�Y
 '�N�b�� '�?NG4��g��Y� ��	g��Y	Wh\ '�N�$��w�Y
w�YG��Y�"�@� '�)N
 '�O '�N�$��7�Y%7��YWh\� ��74��g�
�Y
 �0[������ �\G��Y	g��Y���Wh\Wh\4��� ��	w��YG��YG��Y� ��w�Y
�\!!�)8� ��	�Yg��Yg��Y� ��%��Y!\��L�%��w��Yw�Y!�L�$��7�Y7��Y��L��HCh8Ch8	��\� ��?!�L	g��\ ����� ���
w�8� ���@��
����
w�8������h6
����� h6���G��\���\@��������0���0�'2@\����7@�'��L�'�<�[� ��3@�����0�����6���������6�0@�'0AL�����!��P�-@�'0AL�����!��P�)@���m[� ����m[���\�����������2	���2	����
��"��\ ��G�P�0Y\

��������Q
��\��Y������Y��Y'��Y� ��

��8��Y
\����
���Y
w�8

\����
������l6@�����
��i6�@�
�m6����@�
���m7

� ��@����Y
��k[�������
 �������Y���Y�H\� ������[��k[
�\� ����K[!��P�(\����
(8��8��<�����G\
�\
�G\����@�



�'���@�
��\@����@�
7H

�'���@�
7H@�� ��
��

W�P@�����
GX\@� �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�
`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@t`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�h`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@``��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@W`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�N`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@D`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@;`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�2`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@*`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��LCh8	��\� ���L �������	w�8�@��
	����� ��w�8
��h6��������� h6w��\���\� ��@����0���0�����2@\�7@�'��L������<�[�3@�����0��������6�����6
1@������'0AL�!��P�-@������'0AL�!��P�)@�������m[
��m[���\� ���������2	���2���		�#�������\G�P
�0Y\� ������Q��\������Y���Y
���Y������Y���8

���Y����	���Y\	w�8����\������l6�����@���i6�@������m6@����m7����		@����Y� ����k[���� �� ���Y
���Y�����H\����[��k[�����\�K[!��P�����(\(8
��8����

��<

G\
�\����	
�G\@�		����		�'@�	��\����@�@�7H����	�'@�	7H�@�D@�	��		W�P���@�	wX\@��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D</*�L.*7�L/G�P���.��\/�Q7�Y����.7��Y/G�YW�Y����?@�@��`����\��D</*�L.*G�L/G�P���.��\/�QW�Y����.W��Y
/G�Y
��Y����?@�@��`�
���\� ����*�\"��1���?*'�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������	
�?X9������[�`��?h8Gh\&��P��H��G�\G�P�@��w�\�P�P� ��_�Q���.�h\������Y.��\.���Y�����Y"��Y�����?/���\@��`�"���\�����������[� �����Q

@X9� �@�		���
?h8gh\���!��(��P���@��g�\G�P��\����?�P�P�Q������.�h\��Y����.��\.���Y��Y����!��Y��/���\���@�`�!���\���@���	���[
���\� ��

�����	?h8� ��	�h\ ��)��P��H����\G�P�@����\�P�P� ��_	�Q���.h\����7	�Y.��\.7��Y����Y �Y�����?/���\@��`� ���\��������
���[� ����� X\
?h8���
�h\4�����@����\G�P�\����?�P�P
�Q�������.7h\W
�Y����.�\.W��Y'�Y����*7	�Y	��/��\����?@�`���4���\��@����?X8���"������?X90��� ��������[?h8����'h\	��'�\��G�PG�\�P=���P
�Q��� ��.�h\�
�Y.W�\����.���Y��Y*��Y����
��/W��\@�`�����0���\���"�����[���@X9� �@����?h87h\����#��
��7�\��G�Pw�\�P=���P��Q���� ��.�h\���Y.��\����.���Y��Y#�
�Y������/���\@~`�����#���\�����"�����[������B��?h8gh\%����H��$g�\G�P�@��'��\$�P'�P� ��_�Q���.�h\������Y .��\.���Y�����Y%��Y�����?/���\@�l`�%���\��������'���[� �@���?h8�h\����$����*��\��$G�P��\*�P=���P'�Q���� ��.�h\'$w�Y(.��\����.w��Y$��Y$'G�Y����
��/���\@\`�����$���\��������!'X\
��X\����'GX\'G�P(4w�\����'�Q��Y-G��Y���'��Y���Y
������.G��\/w��\@P`��������\��( w�\ ��?*'G�P���'�Q����-*��Y*-��Y'��Y����-��Y
��.��\����?/w��\@�F`����\��D��(!w�\ 'G�P���A���*'�Q+ ��Y���� +��Y*'��Y +��Y� ��
��.��\/w��\���@=`� ���\��� ��("w�\!'G�P�����*'��Q+!���Y!+'��Y���*'�Y!+���Y
������.'��\/w��\@4`�����!���\��"#X\����
��""WX\6"GX\ ��?'6G�P(0g�\"6��Q���@�"''��Y'"��Y,6w�Y����""���Y
��.��\����?/g��\@�(`�"���\��D��(#g�\'6G�P���A���*6��Q*'���Y����0*7��Y'6��Y0*w�Y� ��
��.7��\/g��\���@`�0���\��� ��(%g�\#6G�P�����*6��Q,#���Y#,W��Y���*67��Y#,���Y
������.W��\/g��\@`�����#���\��($g�\ ��?%6G�P���'6��Q����'%w��Y%'G��Y*6W�Y����*'���Y
��.G��\����/g��\@�`���B�@�1��/ �%�?X9/&��k[�B�@�+%��/ �&�?X8'@X8���2&��/ �$�?X93'��/ ر"�@�/G�/ �.%G�/ �-&G�/ سd��,'G�/ �g4��41h\����4+�Y74��42��Y� ��+3��Y//h\4@X8�@��++h\0�?X86�/ ��"�@�3%�/ �2&�/ �1G�/ ����74��..��Y/%G�/ �����--��Y74��5,���Y�"�@�,'�/ �.&G�/ �-'G�/ �� ��� ��"5'��Y+'�O� ���'4��66h\ 1h\�$��53�Y /�Y/'�N�B���!'�N52���Y1'�)N�!�� .��Y,,���Y'�N�"��.'�?N��0[ -��Y� ��#,7�Y��1� �\�@��?�\�)8  ���Y�@��\ Ch8 ��L@��#	���\!�L  ���H
��G�\G�P�@�� w�\�P  �P� ��_�Q���.h\����!G�Y(.��\!���Y���w�Y!G��Y
�����?/���\@��`����\��������/(��k[�������
���@��g�\G�P
��\����_�P

�P�Q����?��.�h\g�Y����(.��\���YG�Y����g�Y
��/���\���@�`����\������/)��k[��������
��	��\2��G�P��\		�P]���P�Q����' ��.	�h\w�Y(.��\�������Yg�Yw�Y����?
��/���\@�`��������\����� �����
�����@��
��\G�P�\����_

�P�P��Q������.
h\	���Y����.�\	���Yw�Y����	���Y��/��\���@�`����\���������
����	��'�\G�P�@�tG�\�P�P� ���	�Q���.Gh\������Y.W�\���Y���	��Y��Y�����?/W��\@��`����\��������	������
��
��7�\2��
G�Pw�\�P]��	�P�Q����' ��.�h\
��Y.��\����	���Y
��Y	���Y����?��/���\@�`�����	���\����� ��
���
�����@��g�\
G�P��\����_�P�P
�Q������.�h\
��Y����.��\

���Y��Y����

��Y��/���\���@{`�
���\�� �����
�����@����\G�P��\����_�P�P��Q������.�h\W��Y����.��\���Y��Y����W��Y��/���\���@m`����\�������X\
��@��
gX\

wX\
G�P������\
��Q���Y���w��Y
��Y��Y� ����.w��\/���\���@a`����\��� ����\
G�P�����
��Q���Yg��Y���
w�Y���Y������?.g��\/���\@X`��������\����\1 ��_
G�P���
�Q������YG��Y
g�Y������Y��.G��\����?/���\@�N`����\�������\
G�P���B���
�Q��Y�������Y
G�Y��Y� ����.���\/���\���@E`����\������
	�X\
��
�X\����WX\
G�P��\������Q

���Y
���Y�����Y

�Y������?.���\/���\@:`�����
���\��	��\1 ��_G�P����Q������Y���Y���Y������Y��.���\����?/���\@�0`����\�����
��\	G�P���B�����Q	���Y����	���Y��Y	���Y� ����.���\/���\���@'`�	���\��� ����\
G�P�����Q
�Y
W��Y������Y*�Y������?.W��\/���\@`� �@���G�O �%G�O ��"�@�
&G�O �'G�O ���O ��"�@�%��O �&��O �'��O ر"�@��O �%�O �&�O ر"�@�G�O �%G�O �&G�O ر��'�O �'G�O ���L�`���LW4��gh\����G��Ygh\74��� ��gh\
w��Y
gh\�D���G�
�YG��Y���Y�����G�Yw�
�Yw��Y�$�����Y�h\w��Y� ����
�Y��Y���Y������Y��YCh8 ��	���\ �������*/w�8�A��,*����� ��+.w�8/,��h6-+�������/-��"h61���\�@�����1.���02/���0�1'2E\�����8@�1'��L�/�<�[� �@��4@�.����0�.����6����/����61`��P�1@������.'0FL7����P.@������.����6�/'0EL/����P����)@�/-��m[1���\� ��..
���21�
���/,��m[����//
���211
,*��@�P�"��3,�\-3G�P����?,3�0Y\/+���+-�Q����./��\--���Y2.���Y� ��+,'�Y+-��Y//��8� ��,,��Y*-���Y1/\����.*w�8..\/.��������//��l6�@�/.��i6����@�/.�m6@����/.���m7**
@�����1-���Y/.��k[11���� ��/.11 2-���Y�@��+-���Y/1�H\72����[����//��k[+.�\++�K[����/`���P11�(\+1(8����,���8,,��<,,G\���,+�\*,�G\@���****�'@�����*1��\@�@���./7H*.�'@�����*/7H@�*�����**W�P@�*.�X\���@� ����@� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�
`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@t`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�h`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@``��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@W`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�N`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@D`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@;`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�2`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@*`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��LCh8	��\� ���L �������	w�8�@��
	����� ��w�8
��h6��������� h6w��\���\� ��@����0���0�����2@\�7@�'��L������<�[�3@�����0��������6�����6
1@������'0AL�!��P�-@������'0AL�!��P�)@�������m[
��m[���\� ���������2	���2���		�#�������\G�P
�0Y\� ������Q��\������Y���Y
���Y������Y���8

���Y����	���Y\	w�8����\������l6�����@���i6�@������m6@����m7����		@����Y� ����k[���� �� ���Y
���Y�����H\����[��k[�����\�K[!��P�����(\(8
��8����

��<

G\
�\����	
�G\@�		����		�'@�	��\����@�@�7H����	�'@�	7H�@�D@�	��		W�P���@�	wX\@��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NG�0[7�O�GB��7�N�mK7�0[����cK�P���D<E*�LF*7�LEG�P���FW�\E��QG��Y����FG��YE7#�YW��Y����?@�@`����\��D<E*�LF*G�LEG�P���FW�\E�QW�Y����FW��YE�#�YG��Y����?@�@�`����\� ����*'�\��1���?*�\��?X8�@�t?X8��Q
���\� �@�
7 X\
�?X8
�������
�?X9������[�`��?h8	Gh\&��P��H��G�\	G�P�@����\�P
�P� ��_	�Q���F�h\������YF��\F���Y���
	g#�Y��Y�����?E���\@�`����\�����������[� ����
@X9��Q� �@�
���?h8wh\���
��=��P���@��w�\G�P��\����?
�P�P��Q������F
�h\��Y����F��\
F��Y�#�Y����
���Y��E���\���@��`�
���\���@���
���[
���\� �������
?h8� ��
�h\��>��P��H����\G�P�@���\�P�P� ��_
�Q���F7h\����W
�YF�\FW��Y���'#�Y7	�Y�����?E��\@�`����\�����������[� ����� X\?h8����h\�����@����\G�P7�\����?�P�P��Q������Fgh\���Y����FG�\F���YW#�Y����g�
�Y	��EG��\���@��`����\���@����?X8���"������?X9��� ��������[?h8����gh\	�� g�\��G�P!��\ �P=��!�P�Q���� ��F�h\��YF��\����F���Y�#�Y��Y����
��E���\@��`��������\�����"�����[���@X9� �@����?h8wh\������
��#w�\��"G�P$��\#�P=�� $�P!�Q���� ��Fh\!"�YF��\����F��Y �#�Y!�Y������E���\@��`��������\�����"�����[������B�� ?h8!�h\"����H��'��\$!G�P�@��(�\"'�P#(�P� ��_%!�Q���F"7h\����%$W�Y F�\"FW��Y���#!'#�Y"%7�Y�����?E��\@v`�"���\��������'���[� �@���#?h8$�h\����%����*��\��)$G�P+7�\%*�P=��'+�P($��Q���� ��F%wh\()���Y(FG�\����%F���Y'$W#�Y%(w��Y����
��EG��\@�e`�����%���\����� @�/&��k[+��/ �'�?X9��@�&�?X9(�?X8,'g�/ ر"@�-g�/ �/(g�/ �)@X8�"@�*�?X80)g�/ �1'��/ ر"@�2(��/ �3)��/ �4'��/ ر"@�5��/ �6(��/ �7)��/ �� @�.@X88'��/ �9��/ ر"��:(��/ �;)��/ ������<��
��<G�\2��@	G�P��\<<�P]��?�PA	 �Q���' ��F<�h\A@ �Y(F��\����Ag��Y?	G#�Y?A��Y����?
��E���\@�G`�����<���\������/=��k[��	�����
��w�\	G�P�@�t��\�P�P� �����Q���F�h\����		G��Y(F��\	g��Y���w#�Y		G��Y
����!�?E���\@8`�	���\��������/>��k[�������
���@��
��\G�P�\����_

�P�P�Q������F
h\=w�Y����(F�\=g��YG#�Y����=w�Y
��E��\���@�(`����\���������
��������\G�P�@�t7�\�P�P� ���
��Q���F�h\�������YFG�\g��Y���w#�Y���Y����!�?EG��\@`����\��������������
��	��g�\2��G�P��\�P]���P
�Q����' ��F�h\��YF��\����g��Y
�#�Y��Y����?��E���\@�`��������\����� ��
���
��
���@��w�\
G�P��\����_�P�P�Q������F�h\
��Y����F��\
g��Y�#�Y����
��Y��E���\���@��`�
���\���������
��������\!G�P�@�t �\�P  �P� ���!��Q���Fh\�������YF�\g��Y���!�#�Y���Y����!�?E��\@�`����\���������
��������\$G�P�@�t#7�\�P#�P� ���$��Q���FWh\�������YFG�\g��Y���$W#�Y���Y����!�?EG��\@�`����\���������X\����
��'X\WX\4 ��_G�P��\�Q���@��Y���Yg�Y������Y��F���\���!�?E���\@�`����\�����
gX\
��@��'X\WX\G�P�����\��Q7��Y���g��Y�YG��Y� ����Fg��\E��\���@��`����\��� ��
�\G�P������Q�Y���Y���g��Y�Y������?F���\E��\@��`��������\���\� ���4��
G�P--gh\� ��_��,,7��Y��Q���@�
G��Y
'��Y�	�Y����
G��Y��F'��\���!�?E��\@�`�
���\�������\�4��1��G�P//��Y�������Q'��YW��Y���'�
�Y	�Y������?FW��\E��\@��`��������\����\� ���4��G�P/0��Y���B�����Q?'�Y����?���Y�Y??'�Y� ����F���\E���\����@��`���+gh\1���G�P"��\�4�������
��17�Y2��Y����	�Q�4��3�Y� ��G	�Y�h\'��Y����/�	�YG�Y?W
�Y� ����F'��\E���\����@��`���g4��� ��55gh\G�P%��\���
��447��YG4�����B�46��Y�Q'�Y� ��?7�YW��Y��Y�����Y?W�Y������FW��\E���\@��`�����	�X\'4���@����99gh\GX\����887��YwX\8:��Y2���G�Pg�\;�Y������Y
��Q
���Y�`���w	���\
w��Y������Y�(8

'�Y� ����Fw��\Eg��\���@�r`�
���\��� ��g�\G�P�������Q��YG��Y���w�Y��Y������FG��\Eg��\@�i`��������\��	g�\ ��?G�P����Q������Y���YG��Y������Y��F���\����?Eg��\@``����\��D��<g�\	G�P���A�����Q	���Y����	���Y��Y	���Y� ����F���\Eg��\���@�V`�	���\������
�X\
���X\����WX\G�P��\�����Q�Y���Y����Y��Y������F���\E���\@�K`��������\��
��\ ��?G�P����Q�����Y���Y��Y�����Y��F���\����?E���\@B`����\��D����\
G�P���A����Q
�Y����
���Y���Y
�Y� ����F���\E���\���@�8`�
���\��� ����\G�P�����Q��YW��Y���?��Y?��Y������FW��\E���\@�/`� �@���g�O �'g�O ��"�@�(g�O �)g�O ���O ر"�@�'��O �(��O �)��O ر"�@���O �'��O �(��O ر"�@���O �'��O �)��O ر"���(��O �)��O �g4��� ��Gh\Gh\'��L�$�����Y�	�Y�)8� ��74��w�YGh\� ��w	�YGh\(8� ���
�Y��Y��8�`����Y4��	��Y� ����[�h\w�Y�"����[�[w��Y� ��W�[g�Y��Y�"�����0[�[� [� ���([�)8
���Y�A�����YW�[���\�&��?W�[W�[
��Y� ����0[W0[

�w�"���\	���\�)8� ��G�\7\����K� ���(8�����6��� ���?Ew�8� ��A��A?����@Fw�8����/A��h6B@����/B��"h6����C���\�@�CF���0����DE���0�CG2E\9@����C'��L�E7�<#�[
5@��"���F����0�F����6�E����6����C`��P�1@�F'0FL����7����P.@�F����6�����E'0EL/����P�)@�� ��/B��m[C���\FF
���2����C�
���/A��m[EE
���2���CC
B?�#������EBW\GEG�PAE�0Y\����D@���BG� �Q@G'�#�Y����GDg�\EG��YBAW�#�Y����B@'�"�YDD���8AA'�#�Y����DD7\?@!�YC?w�8����CCG\EC����/E��l6�����@�/C��i6�@�����/C�m6@�/C���m7����??
@�E@!�Y� ��/C��k[EE���DC� ��EE F@!�Y@@!�Y����DEGH\7F���[/D��k[����@C�\@@��!K[/`���P����EE(\@E(8A���8����BA �<BBWG\B@'\����?B�G\@�??����??�'@�?D��\����@�@�EE7#H����?E�'@�?E7#H�@�D@�?��??W�P���@�?FWX\@��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@u`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�i`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@a`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@X`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�O`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@E`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@<`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�3`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@+`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��L�w	��\�@���L�(8 �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@*`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@"`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@�
`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@��`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@��`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
w��Y'��YG�Y���������\@��`��������\�����"���?X8������ ���?X9������������[��?h8�@�D���h\
G�P�����\'�\�P����
�Q�P
��Y� ���h\G�\���Y���'�	�Y
��Y�����?7��\@�`����\�����������[� �����@X9�����������
?h8�@�D��wh\G�P���w�\��\�P����
�Q
�P
��Y� ���h\G�\���Y���
w	�Y
���Y�����?'��\@�`����\�����������[�������������@���
?h8��� ��Gh\G�PG�\����?
��\�P��Q ��?

�P���Y�h\����G�\
���Y���Y������Y�����\���@��`����\���������[���@���?h8��� ��wh\	G�Pw�\����?
G�\�P
��Q ��?
�P
	���Ywh\����G�\���YG��Y����	
w�Y�����\���@��`�	���\������X\�	��@�PWX\gX\G�P����G�\��QG��Y���g��Y
w�Y���Y������g��\@��`��������\��G�\ ��?G�P����Q����G�YW��Yg��Y����G�Y��W��\���@�z`����\��� ��G�\G�P������Q
G��Y
7��Y���W��Y
G��Y�����?7��\@r`����\��D��G�\G�P���A������Q
G��Y����
��Y7�Y
G��Y��������\@�i`��������\��'X\����	���X\�X\ ��?G�PG�\��Q���@���Y'��Y
7�Y�������Y��'��\���@�^`����\��� ��G�\G�P�����Q
�Y
7��Y���'�	�Y
�Y�����?7��\@V`����\��D��G�\
G�P���A�����Q
�Y����

���Y���Y
�Y���������\@�M`��������\��	G�\ ��?
G�P����Q����
��Y#���Y
7��Y����#���Y�����\���@�D`�#���\��B�@�"��/0�"�?X9'"�?X8���
��/0�&"@X8'��/0ر$��$&��/0��?X8@X8� ���'4��	Wh\Wh\����G�Y	
G��Y

g��Y����	g��Y�?X9%%w�Y�"�@�
"��/0���/0�$$w��Y�"�@�'��/0�&��/0�"G�/0ر"�@�G�/0�"��/0�'G�/0ر"�@���/0�&G�/0�'��/0ر$��&��/0�$$'h\%%'h\� ����4��

Wh\Wh\�$��G�YG�Y'��L� ���w4��
g��Y	g��Y� ����)8	w�Y
w�Y� ���
(8
Wh\��8�$��		7�Y%7��YWh\�&�� ��[ ��[
�[� ���G4��G�Y�[� ��G��Y �0[� [�@����([
Wh\
g��Y� ��Wh\g��Y '�[�`�����\4��G��Y� �� '�[G��Y
w�Y�@�@�w�Y �)8  0[�$��
g��Yg��Y

��Y� ���%��Y'�['�[����
w��Yw�Y�0[����

7�Y7��YW��\��H

�w�w	���\�"��!�\	g��\!!�)8� �@�?!!'\����K�(8�@���(8�����6��� ���
w�8� ���@��
����
w�8������h6
����� h6���G��\���\@��������0���0�'2@\����7@�'��L�'�<�[� ��3@�����0�����6���������6�0@�'0AL�����!��P�-@�'0AL�����!��P�)@���m[� ����m[���\�����������2	���2	����
��"��\ ��G�P�0Y\

��������Q
��\��Y������Y��Y'��Y� ��

��8��Y
\����
���Y
w�8

\����
������l6@�����
��i6�@�
�m6����@�
���m7

� ��@����Y
��k[�������
 �������Y���Y�H\� ������[��k[
�\� ����K[!��P�(\����
(8��8��<�����G\
�\
�G\����@�



�'���@�
��\@����@�
7H

�'���@�
7H@�� ��
��

W�P@�����
GX\@� �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���w�\�Q'�Y������YW�Y'��Y����?@�@�`����\��D<*�L*G�LG�P���w�\�Q7�Y����7��YG�Y'�Y����?@�@�`����\� ����*��\��1���?*w�\���?X8�@�t?X8	�Q
���\� �@�	g X\�?X8�������
����?X9
���[��@���	
?h8��� ��
�h\G�P
��\����?��\	
�P��Q ��?
�P���Y	�h\����w�\	���Y
��Y����	���Y��@�`��������\��� �����[����Q�@��@X9�������@���?h8��� ��7h\	G�P7�\����?G�\�P
��Q ��?�P
	���YGh\����w�\���Y7�Y����	
G��Y��@�`��������\��� ����[
7��\��������������"��?h8�
��'h\@��	G�P'�\7�\��X�P
��Q�P����
	���Y7h\w�\�������Y	7�Y	
���Y������@��`����������\�����[������G X\����@���?h8��� ��h\G�P
�\����?'�\
�P	��Q ��?�P	���Y'h\����w�\���Y�Y����		'�Y��@�`��������\���"���?X8������� ��
������?X9�������[��?h8�@�D�
���h\
G�P�����\
7�\�P�����Q	
�P
��Y� ���h\w�\	���Y���
��Y	���Y�����?@�`���
���\��������[���`��@X9�������@���?h8���� ��'h\	G�P'�\����?7�\�P
��Q ��?�P
	���Y7h\����w�\���Y	7�Y����
���Y��@�`��������\����� �����[��������������?h8�@�D�
��h\	G�P����\'�\�P����
��Q�P
	���Y� ��'h\w�\���Y���	�Y	
��Y�����?@�`������\��������[���@���?h8���� ��7h\	G�P7�\����?�\�P
��Q ��?�P
	���YWh\����w�\���Y�Y����
W�Y��@u`��������\��������'X\�	��WX\����gX\G�Pw�\����	�Q��Y	g��Y���
��Y���Y�����?g��\@�i`����\��D��w�\	G�P���A����
��Q	���Y����
W��Y��
�Y7�Y������W��\@a`��������\��w�\ ��?	G�P���
��Q����	���YG��Y
W
�Y������
�Y��G��\���@X`����\��� ��w�\	G�P����
��Q	���Y'��Y���	G	�Y�
�Y�����?'��\@�O`����\�����	�X\�	��@�P	'X\WX\	G�P����
w�\
��Q
	���Y���
���Y	���Y
��Y���������\@E`��������\��w�\ ��?	G�P���
��Q����	���Y
7��Y
���Y����
���Y��7��\���@<`�
���\��� ��w�\	G�P������Q	7��Y	'��Y�����Y7��Y�����?'��\@�3`����\��D��w�\G�P���A���	�Q��Y����W��Y	'��Y	��Y������W��\@+`�@�����?X9�?X9��@��� ��?X8�� ر"�@�
G� �G� �G� ���@�@X8�?X8G� ��@�@�
@X8�� �� ��"�@��� �� ��� زB�@��� ��� ��� ر��_� �� �'�O�'b��'�NW4��Wh\� ��

Wh\G��Y'�?N���G�Y
'�N74���$���Wh\7��YWh\� ���7�YG�Y')N����YG��Y4��� ���Y7�Y
���\� ��
�h\7��Y'�N� ��'��Y��Y��Y�@��G0[7	�Y��\�`���)8���Y
�\�@�H��L�w	��\�@���L�(8 �����	w�8A������
	����w�8
��h6� ������� h6w��\�������\@����0�������0�2@\�7@����'��L��<�[�3@����������0�����6�����6�����0@�'0AL�!��P����-@�'0AL�!��P� ��)@���m[
��m[� �����\�������2����	���2		��@�P�"���\G�P����?
�0Y\����Q������\��Y���Y� ��
���Y��Y���8� ��

���Y	���Y\����	w�8\����������l6�@���i6����@��m6@�������m7		@��������Y��k[���� �� ���Y�@��
���Y�H\����[������k[�\�K[����!��P�(\(8����
��8

��<

G\���
�\	
�G\@���				�'@�����	��\@�@���7H	�'@�����	7H@�	�����		W�P@�	wX\���@� ����@� �����L!W����Q���? g��'��!'�O�"�ğ!'N!!70[ 7�O�GB�� 7N!�mK  70[���� cK�P���D<*�L*7�LG�P���G�\�Q�Y����'��Y7�Y��Y����?@�@*`����\��D<*�L*G�LG�P���G�\�Q7�Y����7��Yg�YG�Y����?@�@"`����\� ��	��*�\*�\� ��_��?X8?X8����?�Q"
7��\' X\�"�@��?X8����?X9������������[��@���?h8��� ���h\G�P��\����?
�\�P��Q ��?
�P���Y7h\����G�\���Y
7�Y�������Y�����\���@�
`����\���������[��� ���Q@X9�����������?h8�@�D��Wh\G�P���W�\
7�\�P�����Q
�Pw�Y� ��Wh\G�\w��Y���W�Yg��Y����@��`����\���������[
���\��������������C���?h8��� ��Gh\G�P
G�\����?W�\
�P�Q ��?�Pw�Y
Wh\����
G�\
w��YG��Y����W�Y�����\���@��`����\���������[������ X\�����"��?h8��'h\@��G�P
'�\G�\��X
�P�Q�P����w�Y
Gh\
G�\����
[Binary data: the remainder of this cache file is raw, non-text machine data (mojibake when rendered as text) and is not reproducible here.]
�YCh8�"��& g�\	���\Ch8� ��0��Y	���\		Ch8� ��!w�\		���\  �)8�@��Ch8	��\  g\����&���K����6&��@��	��6���6 ������	w�8	����� ��w�8��h6�������� h6���\@��������0���0�W2@\����6@�7��L�G�<�[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P	�7\G�P����?�0Y\�����Q����'�\��Y���Y������Yg�Y
��Y� �����8��YG\����w�87\����������l6�@���i6����@��m6 �������m7 �������Y��k[���� �� ��Y�@����YgH\7���[������k[�\��K[����!��P'(\(8������8�<'G\���7\WG\ ����' ����W�\ �GH����' �GH�@�D ���W�P��� �7X\ �����@��P�P������L�P�P����P����@� �����LW����Q���?#g��'��'�O�"�ğ'N70[#7�O�GB��#7N�mK##'0[����#cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�`�g��\����	W��L*7�\	'��K1 ��_*�\?X8?X8� ����Q)
���\�Q=@��^&
���\()�?X9
&�b0���� &�?X9 �B0�	�W YL� ��$g X\� X\'&@X8� �@�		��\$�?X8�?X8�@��$�?Y8�?Y8	@X8�"�@�	@@X8	�@h9	Ah8� �@�$	�Y
	��Y	G�Y� ��	�@h9	�Y	�Y� ��$7h\
���Y�	�Y� ��-g��Ywh\Wh\� ��$$�	�Q%��Yh\� ��-��Y.��Q"�
�Q� �`�!��Q+$�h\*$�h\�@��+%'��Y&�?X8��0ر"�@�&��0� �"0��b0��"���'��0�'��0�$gh\���@�$wh\%7�Y,%G�Y�`��%W��Y74��"���Y�"�@�"���Y"��Y"��Y�"��$�h\%�?Y8
$�h\��@�$�h\$�h\4���"��%��Y$7Y\%�Y�@��%'��Y%7�Y	"G��Y�"�@�"7Y\
"W��Y
"g��Y�@��"w�Y)�?X8	���Y�"�@���Y
���Y��Y� �@����Y+��Y��Y�"�@�*.�h\
.�h\.�h\� ����Y.�h\-�?Y8�"�@�*-��Y+-��Y,-���Y���))@X8--g�Y&W�0�� �@�..GY\
 WA0�Wa0ر"�@�&�"0�'WA0� ��0ز"�����0�&'��0�/'��L�`��.!�Y\74��$�h\�"�@�$�h\	$�h\$h\�"�@�$'h\$7h\
%��Y� ��
/�)8%��Y$�h\�b��%W��Y4��%��Y�"��
"���Y/'@�8"��Y� ��%G��Y

'(8"��Y� ���%��	�Y7�[	7�[� ��#��[ #��["g�Y� ���"w��Y7([7� [� �`�w�Yg�YG�Y� ��W�Y	� �\#��[� �@�
#�)8!���Y!G��Y�"���#0[ #��[
��[� ��
��[##0[$h\� ��
��0[g��Y%�	�Y� ��#7�\%��Y��\�"��
!��Y
"���Y!g�Y� �@��)8"���Y""��	�Y�@�@�
.��Y(��Y'\� ��w��Y)��Y

Ch8�"��.���Y
	���\#���K�b��.W�Y.��YCh8�@��Ch8	���\Ch8���?	W��\	G��\
����6����_#�
���6
����6���
 ���
w�8����
����	w�8��h6����	����� h6���\� ��@����0���0�����g2@\6@�7��L�����W���[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[��m[���\� ���������2	���2����	
�G\ ��G�P�0Y\	�������	�Q7�\���Y����	���Y���Y	w��Y�������Y��8���Y����W\w�8G\����������l6@�������i6�@��m6���� ����m7� �� ����Y��k[������� �������Y���YwH\� ��G���[��k[�\� ���K[!��P7(\����(8��8��<����7G\G\gG\���� ��'��� �g�\ ���G�H�' ����G�H �����W�P �GX\��� ����@��P����P�P�P������L�P�P����P����@� �����LW����Q���?#g��'��'�O�"�ğ'N70[#7�O�GB��#7N�mK##'0[����#cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�`�g��\����	W��L*7�\	'��K1 ��_*�\?X8?X8� ����Q)
���\�Q=@��^&
���\()�?X9
&�b0���� &�?X9 �B0�	�W YL� ��$g X\� X\'&@X8� �@�		��\$�?X8�?X8�@��$�?Y8�?Y8	@X8�"�@�	@@X8	�@h9	Ah8� �@�$	�Y
	��Y	G�Y� ��	�@h9	�Y	�Y� ��$7h\
���Y�	�Y� ��-g��Ywh\Wh\� ��$$�	�Q%��Yh\� ��-��Y.��Q"�
�Q� �`�!��Q+$�h\*$�h\�@��+%'��Y&�?X8��0ر"�@�&��0� �"0��b0��"���'��0�'��0�$gh\���@�$wh\%7�Y,%G�Y�`��%W��Y74��"���Y�"�@�"���Y"��Y"��Y�"��$�h\%�?Y8
$�h\��@�$�h\$�h\4���"��%��Y$7Y\%�Y�@��%'��Y%7�Y	"G��Y�"�@�"7Y\
"W��Y
"g��Y�@��"w�Y)�?X8	���Y�"�@���Y
���Y��Y� �@����Y+��Y��Y�"�@�*.�h\
.�h\.�h\� ����Y.�h\-�?Y8�"�@�*-��Y+-��Y,-���Y���))@X8--g�Y&W�0�� �@�..GY\
 WA0�Wa0ر"�@�&�"0�'WA0� ��0ز"�����0�&'��0�/'��L�`��.!�Y\74��$�h\�"�@�$�h\	$�h\$h\�"�@�$'h\$7h\
%��Y� ��
/�)8%��Y$�h\�b��%W��Y4��%��Y�"��
"���Y/'@�8"��Y� ��%G��Y

'(8"��Y� ���%��	�Y7�[	7�[� ��#��[ #��["g�Y� ���"w��Y7([7� [� �`�w�Yg�YG�Y� ��W�Y	� �\#��[� �@�
#�)8!���Y!G��Y�"���#0[ #��[
��[� ��
��[##0[$h\� ��
��0[g��Y%�	�Y� ��#7�\%��Y��\�"��
!��Y
"���Y!g�Y� �@��)8"���Y""��	�Y�@�@�
.��Y(��Y'\� ��w��Y)��Y

Ch8�"��.���Y
	���\#���K�b��.W�Y.��YCh8�@��Ch8	���\Ch8���?	W��\	G��\
����6����_#�
���6
����6���
 ���
w�8����
����	w�8��h6����	����� h6���\� ��@����0���0�����g2@\6@�7��L�����W���[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[��m[���\� ���������2	���2����	
�G\ ��G�P�0Y\	�������	�Q7�\���Y����	���Y���Y	w��Y�������Y��8���Y����W\w�8G\����������l6@�������i6�@��m6���� ����m7� �� ����Y��k[������� �������Y���YwH\� ��G���[��k[�\� ���K[!��P7(\����(8��8��<����7G\G\gG\���� ��'��� �g�\ ���G�H�' ����G�H �����W�P �GX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@z`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@r`�w��\� ��	'��L*�\�)81���*'�\
W��L	�)8� ��?
'��K		��8?X8�`��?X8(8
��[� ����[��Q	'�[�@���Q
7��\��[1 ��
w��\
��0[��[� ���	'�[	'� [
	'([�&�@���[	��[�W YL� ��0[�)8
� �\� ��?��0[��\W X\�@��
��\
g X\	�@h9� ���?X8'�\Ah8� �@�
@X8
�?X8@@X8� ��w��Y�?Y8
�?Y8� ��0�?X8g��Y��Y� ��(���Y
��Y
��Y� ��
��Y��Y�?X9�B�@�
�?X9�/ ��\�"�@�
�/ �g�/ ��/ ر"�@�
g�/ �g�/ �	@X8�$��	g�/ ��h\�@h9� ���h\�h\
�h\� ���Q��Y��Y� ��
�Q�Q

�
�Q� �@�'4��'h\Gh\� �@��?X87	�Y�?Y8� �@�7��Y	�/ �G�/ ��"�@�
G�/ �G�/ ���Y�@��'Y\@X8'�/ ���@�74��W�	�Y
'�/ ����Y\�
�Y	G�/ ر"���'�/ �	'�/ ���6�`��
�h\74����Y� �@���
�Y�h\�h\�b����
�Y4��7�Y�`�@��?Y8w
�Y7�Y��@�
7Y\W
�Y�O �� ��4���Y7Y\����'\w��Y��L���@�g��Y
�O ��L� �@��w�O �	�O ر"�@�g�O �
g�O �g�O ��"�@t	g�O �G�O �	���\�B��'�O � ��
G�O ر"�@�
'�O �G�O �'�O ز"���	G�O �		'�O ��4������Wh\�4��w�
�Y��B�g�
�Y�h\g4��� ���h\��Y���Y�����
�Y�h\���Y� �@�'4��'��Y���Y� �@�
�h\7�Y��Y� �`�g��YG�Y	��Y�@����LG��Y�L�@�t	��Y		�w		���\���	 ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@�7��L�����g�<�[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���GH�' ����GH �����W�P �WX\��� ����@��P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�Z`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�R`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([� ��G	�Y�)8�h\� ��4��g�	�Y	w�[� �����\
�?Y8w�[�&��w�[w�['��Y� �����Y
�Y\G�0[� ��w0[��Y���Y� ��
�Y\7�\��\�����)8

��Y'\��@X

�w����K	���\� ��� ���� ��
w�8
����	w�8������h6	����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[��m[� �����\�������2����	���2	
�����G\G�P�0Y\� ��	���	�Q7�\�������Y	���Y���Y� ��	w��Y���Y��8�������YW\w�8����G\������l6�����@���i6�@������m6 ����m7���� ����Y� ����k[���� �� ���Y���Y����wH\G���[��k[�����\�K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�l`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�d`�w��\� ���W YL*'�\*�\����?X8?X8�Q�@����Q
g��\W��L=@��^
7��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@���\	@X8@@X8���@�0�@h9Ah8
�	�Y�*��W�	�Yg�	�Y�	�Y�*��	G	�Y		7	�Y
�h\�(����Y�@h9G��Y� ��h\	�h\��Q� ��Wh\'�Yw�Y� ����Q��Q	@X8� �`��
�Q�h\�h\�B����Y
�?X8
��/0ر"�@���/0���/0�
��/0ر"���	��/0�	��/0���Y� �@�74����Y
�?Y8�"���h\�h\��
�Y�"��@X8
�Y\�Y����?X8��Y�/0��b���Y\4��'�
�Y� �@�7��YG��YW��Y�"�@��/0�
�/0���/0�� �@�g�
�Y�?Y8w��Y�"�@�gh\�Y\h\�b@�	�/0���/0����Y�"6��

��/0�	��/0���Y�����Y\'��L�)8�`��	(8W4���h\�����h\@�8'�[�"��G�Y'�[W��Y� ��74���h\�h\�"���'�Y'
(['� [� ��7�
�Y��[�)8�`����Y4��g�Y� ���g 
�\��[	��[� ��w�Y�	�Y���Y� �@���Y	�
0[��[� � ����Y���Y�Y� ���W
0[��[��[�@�@����Y���Y'�\� ���)8�
0[��Y��H�w'\	���\�@��g��\�w	���\� ��g\@�6��L���?�L��L �������L ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�w2@\6@����7��L�g�<�[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����W\G�P	�0Y\� ��
���
��QG�\����
��Y
���Y	��Y� ��

��Y	��Y���8����
��Yg\w�8����	W\	������l6�����@�	��i6�@�����	�m6 �	���m7���� �
��Y� ��	��k[���	� �� 
��Y
��Y�����H\W���[��k[����	�\��K[!��P����G(\(8��8�����<GG\W\���wG\ �����' �w�\���� �GH�'��� �GH �� ����W�P ����WX\ ����@�����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�Z`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�R`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([� ��G	�Y�)8�h\� ��4��g�	�Y	w�[� �����\
�?Y8w�[�&��w�[w�['��Y� �����Y
�Y\G�0[� ��w0[��Y���Y� ��
�Y\7�\��\�����)8

��Y'\��@X

�w����K	���\� ��� ���� ��
w�8
����	w�8������h6	����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[��m[� �����\�������2����	���2	
�����G\G�P�0Y\� ��	���	�Q7�\�������Y	���Y���Y� ��	w��Y���Y��8�������YW\w�8����G\������l6�����@���i6�@������m6 ����m7���� ����Y� ����k[���� �� ���Y���Y����wH\G���[��k[�����\�K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�o`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�g`�g��\����W��L*��\'��K1 ��_*�\?X8?X8� ����Q
���\�Q=@��^

���\$�?X9
G�/0��B��
��/0�
�?X9
G�/0ؿ$��
��/0��W YL� X\� ��� X\&
@X8'@X8� ����\�?X8�?X8�@�� �?Y8�?Y8@X8�"��@@X8�@h9Ah8�,�@�	�Y���Y���Y� ��(�@h9	�Y	�Y� ��Gh\�	�Yg�	�Y� ���	�Ywh\ Wh\� ��
�Q��Yg��Y� ��
�?X8�?X8h\������Q  �
�Q��Q� �@�4��7h\!'h\�"�@�"���Y��/0�#gh\��@�wh\!���Y4�����#G��Y%W��Y&��/0ر"�@�
G�/0�
G�/0�

w�/0��"�@�
w�/0�G�/0�&G�/0ز"�@�G�/0�&G�/0�w�/0رd��&w�/0�w4��# '��Y���@��?Y8 7��Y'Y\�b��wh\W4���h\� �@� 'Y\! ���Y	���Y� ���h\
���Y ��Y� ��G��Y#'��LW��Y���gh\���Y74���"����Y��Y#�)8���� 7��Y���Y#@�8� ��#�)8 '��Y(8�&����[�� [
#��[����#��[%��[
#�0[����#��[��[#��([� ���#g �\��[#��[� �`�W0[G��Y g��Y� ��W�	�Y w��Y70[� �@�
��\
Gh\�?Y8� �@�Wh\���Y���Y�"�@���\
�YGY\�`����Y�)8
G�Y�"��GY\W�Yg\�"��w�Yw�
�Y�\�@��@�6�w�w�@�	w��\��L	W��\� ��?�L��L�(8� ���(8�LwG\�@��WG\ �� ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?
g��'��'�O����'N
7�OW0[�G"��
7�N�mK

G�0[����
cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@\`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@T`�g��\����W��L*�\'��K1 ��_*��\?X8?X8��D�Q�W YL
7��\�`����Q
G��\��\�"���@h9� X\Ah8� ��@X8	@@X8
�?X9� �@�� X\�?X8�?X9� �@��?Y8�� �
�� �� �@��?X8���Y���Y� ���?X8�?Y8@X8� �����Y7�YGh\� ���@h9���Yg��Y� ��W�Y�?X8�Q�"�@��� ��� �
�� ر"�@��� �� �W�Y�"�@�
� ��� �W��Y�"�@�	� �� �@X8� @�7�Y�� ��� ر"�@�

�� ��� ��� �����h\�h\��Q�����h\�
�Q�Q�����4��Gh\7�Y�`��'��Lw4��'�	�Y�����h\�)8�Y����?Y8(8G4��� �@���YGh\Y\� ��'h\W��YY\����	�8	���Y'4��� ��7�Y
G�[
G�[�"��w	�Y
���Y��Y� ��	�?Y8�h\
G�0[�����[
�)84��� ��g��Y�Y\w�Y�&�����[�� [��([�&���
W�[
W�[W�[�"��wY\��Y��Y� ��W�[

�0[ 	�\� ��g�YW0[��\�@���)8�w��\ ��	g��\'\����K������(8gG\��� ���w�8��������	w�8��h6����	����� h6���\� ��@����0���0�����g2@\6@�7��L�����W���[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[��m[���\� ���������2	���2����	�G\ ��G�P�0Y\	�������	�Q7�\���Y����	���Y���Y	w��Y�������Y���8���Y����W\w�8G\����������l6@�������i6�@��m6���� ����m7� �� ����Y��k[������� �������Y���YwH\� ��G���[��k[�\� ���K[!��P7(\����(8��8��<����7G\G\gG\���� ��'��� �g�\ ���G�H�' ����G�H �����W�P �GX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�{`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�s`�w��\� ��	'��L*�\�)81���*'�\
W��L	�)8� ��?
'��K		��8?X8�`��?X8(8
��[� ����[��Q	'�[�@���Q
7��\��[1 ��
w��\
��0[��[� ���	'�[	'� [
	'([�&�@���[	��[�W YL� ��0[�)8
� �\� ��?��0[��\W X\�@��
��\
g X\	�@h9� ���?X8'�\Ah8� �@�
@X8
�?X8@@X8� ��w��Y�?Y8
�?Y8� ��0�?X8g��Y��Y� ��(���Y
��Y
��Y� ��
��Y��Y�?X9�B�@�
�?X9�/ ��\�"�@�
�/ �g�/ ��/ ر"�@�
g�/ �g�/ �	@X8�$��	g�/ ��h\�@h9� ���h\�h\
�h\� ���Q��Y��Y� ��
�Q�Q

�
�Q� �@�'4��'h\Gh\� �@��?X87	�Y�?Y8� �@�7��Y	�/ �G�/ ��"�@�
G�/ �G�/ ���Y�@��'Y\@X8'�/ ���@�74��W�	�Y
'�/ ����Y\�
�Y	G�/ ر"���'�/ �	'�/ ���6�`��
�h\74����Y� �@���
�Y�h\�h\�b����
�Y4��7�Y�`�@��?Y8w
�Y7�Y��@�
7Y\W
�Y�O �� ��4���Y7Y\����'\w��Y��L���@�g��Y
�O ��L� �@�Ch8�O �	�O ر"�@�g�O �
g�O �g�O ��"�@�	g�O �G�O �	���\�@6@��6'�O � ��"�@�
G�O �
'�O �G�O رB�@�'�O �	G�O �		'�O �� ���4��Wh\�4�� ��_w�
�Yg�
�Y�h\�@��w4���h\��Y������Y�
�Y���Y� ��74���h\'��Y� ��
�h\���Y7�Y� � �g��Y��YG�Y� ����L	��YG��Y�����L	��Y		Ch8@��		���\	�6 ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@�7��L�g�<�[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �GH����' �GH�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

Ch8 ��
	���\'\����K�@���
�6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@n`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@f`�w��\� ���W YL*'�\*�\����?X8?X8�Q�@����Q
g��\W��L=@��^
7��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@���\	@X8@@X8���@�0�@h9Ah8
�	�Y�*��W�	�Yg�	�Y�	�Y�*��	G	�Y		7	�Y
�h\�(����Y�@h9G��Y� ��h\	�h\��Q� ��Wh\'�Yw�Y� ����Q��Q	@X8� �`��
�Q�h\�h\�B����Y
�?X8
��/0ر"�@���/0���/0�
��/0ر"���	��/0�	��/0���Y� �@�74����Y
�?Y8�"���h\�h\��
�Y�"��@X8
�Y\�Y����?X8��Y�/0��b���Y\4��'�
�Y� �@�7��YG��YW��Y�"�@��/0�
�/0���/0�� �@�g�
�Y�?Y8w��Y�"�@�gh\�Y\h\�b@�	�/0���/0����Y�"6��

��/0�	��/0���Y�����Y\'��L�)8�`��	(8W4���h\�����h\@�8'�[�"��G�Y'�[W��Y� ��74���h\�h\�"���'�Y'
(['� [� ��7�
�Y��[�)8�`����Y4��g�Y� ���g 
�\��[	��[� ��w�Y�	�Y���Y� �@���Y	�
0[��[� � ����Y���Y�Y� ���W
0[��[��[�@�@����Y���Y'�\� ���)8�
0[��Y���Ch8'\	���\�@�Lg��\Ch8	���\� ��g\@�6��L� �@��6�L��L�@���L�6 ����� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@�7��L�����g�<�[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���GH�' ����GH �����W�P �WX\��� ����@��P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

Ch8 ��
	���\'\����K�@���
�6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�{`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�s`�w��\� ��	'��L*�\�)81���*'�\
W��L	�)8� ��?
'��K		��8?X8�`��?X8(8
��[� ����[��Q	'�[�@���Q
7��\��[1 ��
w��\
��0[��[� ���	'�[	'� [
	'([�&�@���[	��[�W YL� ��0[�)8
� �\� ��?��0[��\W X\�@��
��\
g X\	�@h9� ���?X8'�\Ah8� �@�
@X8
�?X8@@X8� ��w��Y�?Y8
�?Y8� ��0�?X8g��Y��Y� ��(���Y
��Y
��Y� ��
��Y��Y�?X9�B�@�
�?X9�/ ��\�"�@�
�/ �g�/ ��/ ر"�@�
g�/ �g�/ �	@X8�$��	g�/ ��h\�@h9� ���h\�h\
�h\� ���Q��Y��Y� ��
�Q�Q

�
�Q� �@�'4��'h\Gh\� �@��?X87	�Y�?Y8� �@�7��Y	�/ �G�/ ��"�@�
G�/ �G�/ ���Y�@��'Y\@X8'�/ ���@�74��W�	�Y
'�/ ����Y\�
�Y	G�/ ر"���'�/ �	'�/ ���6�`��
�h\74����Y� �@���
�Y�h\�h\�b����
�Y4��7�Y�`�@��?Y8w
�Y7�Y��@�
7Y\W
�Y�O �� ��4���Y7Y\����'\w��Y��L���@�g��Y
�O ��L� �@�Ch8�O �	�O ر"�@�g�O �
g�O �g�O ��"�@�	g�O �G�O �	���\�@6@��6'�O � ��"�@�
G�O �
'�O �G�O رB�@�'�O �	G�O �		'�O �� ���4��Wh\�4�� ��_w�
�Yg�
�Y�h\�@��w4���h\��Y������Y�
�Y���Y� ��74���h\'��Y� ��
�h\���Y7�Y� � �g��Y��YG�Y� ����L	��YG��Y�����L	��Y		Ch8@��		���\	�6 ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@�7��L�g�<�[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �GH����' �GH�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

Ch8 ��
	���\'\����K�@���
�6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NG�0[7�O�GB��7�N�mK7�0[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�v`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�n`�w��\����W��L*�\'��K1 ��_*'�\?X8?X8� ���Q
g��\��Q=@��^
W��\�?X9��/ ���@��?X9�?X8��/ ��@�@�@X8��/ �
��/ �� ���W YL� X\� X\� ���?X8��\�?X8� ��
�?X8�?Y8	�?Y8�"�@�@X8@@X8�@h9� ���(Ah8�	�Y��
�Y�"�� ��
�Y�@h9g	�Y� �@� �	�Y�h\g��Y���
w��Y�	�Y��/ ر"�@���/ ���/ ��Q�(���g�	�Y��/ �

�	�Y� ��Gh\	'h\h\� ��
�Q			�Q�Q� �@�g4��Gh\�?Y8������YGY\G4���������YGY\���Y�"�@���O ���O ���O ر"�@���/ ���/ �
��/ ��"�����O ���/ �g4������h\w�YG4��������Yg��Ygh\�`��
G��Y'4���h\� �A��h\�Y'��Y�#����Y��Y@X8����W�Y
7	�Y	�
�Y�"�@���O ���O ���O ��"�@���O ���O ���O ��"�@���O ���O ��/ ��"�@��O ��/ ��O ز"�@��/ ��O ��/ ؿ$���O �
�?Y8G4��"���h\�Ygh\� �����YGh\'��L� �@�'4����YW�Y�`��'��Y�)8���Y���7
�Y'(84���"�����Y'��8w	�Y�`�@�7�[7�[�[�"��'h\Y\w�
0[� �@��[�[� [� ��
��	�Y�h\
�)8� ���	�Y\
�[�([� ��	��Y��	�Y	
�[� ��
�[��Y'�0[�@��
G�
�\7�Y

��0[����w�\	���\�!��w�Y�)8
���\� ��w�Y7\���K� ���w	w��\��� ��6 ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�w2@\6@����7��L�g�<�[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����W\G�P	�0Y\� ��
���
��QG�\����
��Y
���Y	��Y� ��

��Y	��Y���8����
��Yg\w�8����	W\	������l6�����@�	��i6�@�����	�m6 �	���m7���� �
��Y� ��	��k[���	� �� 
��Y
��Y�����H\W���[��k[����	�\��K[!��P����G(\(8��8�����<GG\W\���wG\ �����' �w�\���� �GH�'��� �GH �� ����W�P ����WX\ ����@� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�Z`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�R`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([� ��G	�Y�)8�h\� ��4��g�	�Y	w�[� �����\
�?Y8w�[�&��w�[w�['��Y� �����Y
�Y\G�0[� ��w0[��Y���Y� ��
�Y\7�\��\�����)8

��Y'\��@X

�w����K	���\� ��� ���� ��
w�8
����	w�8������h6	����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[��m[� �����\�������2����	���2	
�����G\G�P�0Y\� ��	���	�Q7�\�������Y	���Y���Y� ��	w��Y���Y��8�������YW\w�8����G\������l6�����@���i6�@������m6 ����m7���� ����Y� ����k[���� �� ���Y���Y����wH\G���[��k[�����\�K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�i`�w��\��D<*�L*G�LG�P���G�\��Qw��Y����
W��Y���Yw�Y����?@�@�a`�w��\� ��	�W YL*g�\*�\����?X8?X8��Q�@���Q
G��\W��L=@��^
'��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@�		��\	@X8	@@X8���@�	�@h9	Ah8'�
�Y�*�@�	7�Y	'�Y		�@h9�*����
�Y'�
�YGh\�(�����Y��YG�
�Y� ���h\7h\
�Q�(�����Y��Y	�?X8� �@���Q'h\�	�Q� �`�	�Q�h\�h\�"�@���Y	��/0���/0ر"�@���/0�	��/0�
@X8�"���
��/0�
��/0����Y� ���?Y8�Y\74���"���
�Y�?Y8�h\�`�@����Y�h\�Y\�b��
�Y��Y�Y\�a��
'��Y7��YG��Y��@��?X8
g��YG�/0ر"��G�/0�w��Y@X8�@�@�W��Y
�h\h\�"�@����Y	G�/0���/0ر"�@���/0�
G�/0�	��/0ر$��

��/0���Y�Y\� �@�'��L�)8g4���"��'h\7h\'@�8�"����Y�	�Yg�[� ��G4��	��Y'(8� ���g�([g�[g� [����h\h\W �\� �@���[��[���Y� �@�4��g�Yw��Y� ��G��Y
�)8'0[� �@�W�Y��Y���Y� ��7��Y7�[7�[� �`�W��Y
��Y���Y�&��
7�[
7�[��0[�B��
���Y��Y
��0[�"��g�\�)8

�w�@���w
	���\W�\ ��	G��\'\���K�@���6 ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�G\G�P����?	�0Y\
���
��Q����7�\
��Y���Y����
	���Y

��Y	���Y� �����8
��YW\����w�8	G\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\G���[������k[	�\��K[����!��P7(\(8������8��<7G\���G\wG\ ����' ����w�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�Z`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�R`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([� ��G	�Y�)8�h\� ��4��g�	�Y	w�[� �����\
�?Y8w�[�&��w�[w�['��Y� �����Y
�Y\G�0[� ��w0[��Y���Y� ��
�Y\7�\��\�����)8

��Y'\��@X

�w����K	���\� ��� ���� ��
w�8
����	w�8������h6	����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[��m[� �����\�������2����	���2	
�����G\G�P�0Y\� ��	���	�Q7�\�������Y	���Y���Y� ��	w��Y���Y��8�������YW\w�8����G\������l6�����@���i6�@������m6 ����m7���� ����Y� ����k[���� �� ���Y���Y����wH\G���[��k[�����\�K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�m`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�e`�g��\����W��L*��\'��K1 ��_*�\?X8?X8� ����Q
���\�Q=@��^

���\$�?X9
G�/0��B��
��/0�
�?X9
G�/0ؿ$��
��/0��W YL� X\� ��� X\&
@X8'@X8� ����\�?X8�?X8�@�� �?Y8�?Y8@X8�"��@@X8�@h9Ah8�,�@�	�Y���Y���Y� ��(�@h9	�Y	�Y� ��Gh\�	�Yg�	�Y� ���	�Ywh\ Wh\� ��
�Q��Yg��Y� ��
�?X8�?X8h\������Q  �
�Q��Q� �@�4��7h\!'h\�"�@�"���Y��/0�#gh\��@�wh\!���Y4�����#G��Y%W��Y&��/0ر"�@�
G�/0�
G�/0�

w�/0��"�@�
w�/0�G�/0�&G�/0ز"�@�G�/0�&G�/0�w�/0رd��&w�/0�g4��# '��Y���@��?Y8 7��Y'Y\��@�$wh\ 'Y\G4��� ��! ���Y	��Y ��Y�"��W��Y'��LG��Y� �@�#gh\��Y��Y� ��74�� 7��Y�)8� ��Gh\���Y#�h\� ���h\'(8�Y� ��'@�8���Y
��Y� ��� '��Y��[%��[���7�[#7�[4��� ��W�	�YG��Y w��Y�"��W0[Wh\
70[� ����[ g��Y��[�&���� [��YG�Y� ��!�([�?Y8���Y� �����YW�Y�)8� ��
��\�Y\!' �\� ����)87�[7�[� ���Y\7�0[w�
�Y� ����YG�\�w�`���w	W��\7\1���	���\
���K
�������(8�(8WG\�@���G\6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P� �����LW����Q���?
g��'��'�O����'N
7�OW0[�G"��
7�N�mK

G�0[����
cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@\`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@T`�g��\����W��L*�\'��K1 ��_*��\?X8?X8��D�Q�W YL
7��\�`����Q
G��\��\�"���@h9� X\Ah8� ��@X8	@@X8
�?X9� �@�� X\�?X8�?X9� �@��?Y8�� �
�� �� �@��?X8���Y���Y� ���?X8�?Y8@X8� �����Y7�YGh\� ���@h9���Yg��Y� ��W�Y�?X8�Q�"�@��� ��� �
�� ر"�@��� �� �W�Y�"�@�
� ��� �W��Y�"�@�	� �� �@X8� @�7�Y�� ��� ر"�@�

�� ��� ��� �����h\�h\��Q�����h\�
�Q�Q�����4��Gh\7�Y�`��'��Lw4��'�	�Y�����h\�)8�Y����?Y8(8G4��� �@���YGh\Y\� ��'h\W��YY\����	�8	���Y'4��� ��7�Y
G�[
G�[�"��w	�Y
���Y��Y� ��	�?Y8�h\
G�0[�����[
�)84��� ��g��Y�Y\w�Y�&�����[�� [��([�&���
W�[
W�[W�[�"��wY\��Y��Y� ��W�[

�0[ 	�\� ��g�YW0[��\�@���)8�w��\ ��	g��\'\����K������(8gG\��� ���w�8��������	w�8��h6����	����� h6���\� ��@����0���0�����g2@\6@�7��L�����W���[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[��m[���\� ���������2	���2����	�G\ ��G�P�0Y\	�������	�Q7�\���Y����	���Y���Y	w��Y�������Y���8���Y����W\w�8G\����������l6@�������i6�@��m6���� ����m7� �� ����Y��k[������� �������Y���YwH\� ��G���[��k[�\� ���K[!��P7(\����(8��8��<����7G\G\gG\���� ��'��� �g�\ ���G�H�' ����G�H �����W�P �GX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'NW0[7�O�GB��7�N�mK'�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�x`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�p`�g��\� ���W YL*��\*�\����?X8?X8��Q�@���Q
g��\W��L=@��^
7��\�?X9��/ ���@��?X9�?X8
��/ ��@�@�@X8��/ ���/ �� ��'��K� X\� X\�"�@��?X8	�?Y8
�?X8�����?Y8��\@X8�"�@�@@X8�@h9Ah8� ��(��
�YG�
�Y��
�Y�"����
�Y�@h9g�
�Y� ���h\g��Y	'h\� ��
w��Y��
�YGh\� �@���Q��/ ���/ ر����/ �g�	�Y��/ �� ��			�Q
�Qh\�����?X8

�	�Y�Q� ��g4��Wh\�?Y8��@�
�	�YWY\G4��������YWY\���Y�"�@���O ���O ���O ر"�@�
��/ ���/ ���/ ��"�����O ���/ �w4������h\w��YW4��������Yg�
�Y�h\�`��
W�
�Y'4���h\� �@��h\'�
�Y�Y��@���O ��
�Y4�����@����Y��O �G��Y�"�@���O ���O ���O ر����O �
7�Y��O ��B����O �@X8	��Y�"�@��/ ��O ��/ رB�@��O ��/ ��O ز���?�/ ��O �
�?Y8�`��Y\�4���h\����Y'��Lg4�����@����Y'h\G��Y� ���)8W	�Ygh\� ��G4���h\�)8� ��'(87�
�Y'��8� �����Yw	�Y�[�b�@��[��[wh\�`��	0[�� [
���Y�"��
	Y\
��[��[� ��	G��Y	��[
��0[� ����([	��Y��[� ����[��\���Y�������Y�0[���Y� ��w�YG�\��\� �@��)8
��Y
w
�Y� ��'\���KCh8�`��Ch8	���\�1 ��_	w��\�6�6� ��6 ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����G\G�P	�0Y\� ��
���
��Q7�\�������Y
���Y	���Y� ��
w�Y		���Y���8������YW\w�8����
G\
������l6�����@�
��i6�@�����
�m6 �
���m7���� ���Y� ��
��k[���
� �� ��Y��Y����wH\G���[��k[����
�\��K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@�����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

Ch8 ��
	���\'\����K�@���
�6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Y���Y����?@�@j`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Y���Y����?@�@b`�g��\� ��	�W YL*w�\*�\����?X8?X8��Q�@���Q
G��\W��L=@��^
7��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@�		��\	@X8	@@X8���@�0	�@h9	Ah8'��Y�*��
	��	�Y	g�	�Y'��Y�*��G��Y7��Y�h\� ��		�@h9���Y��Y� ��7h\h\�Q� ��Wh\'��Y��Y�@���	�Q��Q�
�Q����	�h\�h\���Y���	�?X8
@X8	��/0ر"�@���/0���/0�	��/0��"���
��/0�
��/0��
�Y����@X8�?Y8�Y\� ��'4�����Y�h\� �@��?Y8�h\�
�Y�"��
�Y�Y\��Y�!�@�
'��Y�Y\7��Y�"�@�
g��Yw��YG��Y�@���?X8W��Y
�h\� �@�h\G�/0�G�/0���@�	G�/0����Y��/0��"�@�
G�/0���/0�	��/0ر$��

��/0���Y'��L����Y\�)8'��8� ��	w�['(874��� �@�h\h\
�h\�"�@�'�Y7��Yw�[� ��4��w��Y��Y�&��w� [w�	([�	�Y� ��'�[���YG��Y� ��	� �\�h\'�[� ���)8W��Yg�[�"��G��Yg�	�Y0[�`��g�[W�Y��	�Y�&�`�g�[���Yg�[�@���0[
��	�Y��Y� �@�g0[w�\�)8�@��
�
�YCh8��\@��	���\

Ch8
	���\���@�'\���K������6
�66��� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����g2@\6@�7��L�����W���[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[	��m[���\� ���������2	���2����	�G\ ��G�P	�0Y\
�������
��Q7�\
��Y�������Y
	���Y

��Y����	���Y���8
��Y����W\w�8	G\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��G���[��k[	�\� ����K[!��P7(\����(8��8��<����7G\G\gG\���� ��'��� �g�\ ���G�H�' ����G�H �����W�P �GX\��� ����@��P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

Ch8 ��
	���\'\����K�@���
�6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'NW0[7�O�GB��7�N�mK'�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�x`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�p`�g��\� ���W YL*��\*�\����?X8?X8��Q�@���Q
g��\W��L=@��^
7��\�?X9��/ ���@��?X9�?X8
��/ ��@�@�@X8��/ ���/ �� ��'��K� X\� X\�"�@��?X8	�?Y8
�?X8�����?Y8��\@X8�"�@�@@X8�@h9Ah8� ��(��
�YG�
�Y��
�Y�"����
�Y�@h9g�
�Y� ���h\g��Y	'h\� ��
w��Y��
�YGh\� �@���Q��/ ���/ ر����/ �g�	�Y��/ �� ��			�Q
�Qh\�����?X8

�	�Y�Q� ��g4��Wh\�?Y8��@�
�	�YWY\G4��������YWY\���Y�"�@���O ���O ���O ر"�@�
��/ ���/ ���/ ��"�����O ���/ �w4������h\w��YW4��������Yg�
�Y�h\�`��
W�
�Y'4���h\� �@��h\'�
�Y�Y��@���O ��
�Y4�����@����Y��O �G��Y�"�@���O ���O ���O ر����O �
7�Y��O ��B����O �@X8	��Y�"�@��/ ��O ��/ رB�@��O ��/ ��O ز���?�/ ��O �
�?Y8�`��Y\�4���h\����Y'��Lg4�����@����Y'h\G��Y� ���)8W	�Ygh\� ��G4���h\�)8� ��'(87�
�Y'��8� �����Yw	�Y�[�b�@��[��[wh\�`��	0[�� [
���Y�"��
	Y\
��[��[� ��	G��Y	��[
��0[� ����([	��Y��[� ����[��\���Y�������Y�0[���Y� ��w�YG�\��\� �@��)8
��Y
w
�Y� ��'\���KCh8�`��Ch8	���\�1 ��_	w��\�6�6� ��6 ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����G\G�P	�0Y\� ��
���
��Q7�\�������Y
���Y	���Y� ��
w�Y		���Y���8������YW\w�8����
G\
������l6�����@�
��i6�@�����
�m6 �
���m7���� ���Y� ��
��k[���
� �� ��Y��Y����wH\G���[��k[����
�\��K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@�����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

Ch8 ��
	���\'\����K�@���
�6 ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'NW0[7�O�GB��7�N�mK'�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�x`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�p`�g��\� ���W YL*��\*�\����?X8?X8��Q�@���Q
g��\W��L=@��^
7��\�?X9��/ ���@��?X9�?X8
��/ ��@�@�@X8��/ ���/ �� ��'��K� X\� X\�"�@��?X8	�?Y8
�?X8�����?Y8��\@X8�"�@�@@X8�@h9Ah8� ��(��
�YG�
�Y��
�Y�"����
�Y�@h9g�
�Y� ���h\g��Y	'h\� ��
w��Y��
�YGh\� �@���Q��/ ���/ ر����/ �g�	�Y��/ �� ��			�Q
�Qh\�����?X8

�	�Y�Q� ��g4��Wh\�?Y8��@�
�	�YWY\G4��������YWY\���Y�"�@���O ���O ���O ر"�@�
��/ ���/ ���/ ��"�����O ���/ �w4������h\w��YW4��������Yg�
�Y�h\�`��
W�
�Y'4���h\� �@��h\'�
�Y�Y��@���O ��
�Y4�����@����Y��O �G��Y�"�@���O ���O ���O ر����O �
7�Y��O ��B����O �@X8	��Y�"�@��/ ��O ��/ رB�@��O ��/ ��O ز���?�/ ��O �
�?Y8�`��Y\�4���h\����Y'��Lg4�����@����Y'h\G��Y� ���)8W	�Ygh\� ��G4���h\�)8� ��'(87�
�Y'��8� �����Yw	�Y�[�b�@��[��[wh\�`��	0[�� [
���Y�"��
	Y\
��[��[� ��	G��Y	��[
��0[� ����([	��Y��[� ����[��\���Y�������Y�0[���Y� ��w�YG�\��\� �@��)8
��Y
w
�Y� ��'\���K�w�`���w	���\�1 ��_	w��\��� ��6 ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����G\G�P	�0Y\� ��
���
��Q7�\�������Y
���Y	���Y� ��
w�Y		���Y���8������YW\w�8����
G\
������l6�����@�
��i6�@�����
�m6 �
���m7���� ���Y� ��
��k[���
� �� ��Y��Y����wH\G���[��k[����
�\��K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@�����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

�w ��
	���\'\����K�@���
� ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'N70[7�O�GB��7N�mK'0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Y���Y����?@�@j`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Y���Y����?@�@b`�g��\� ��	�W YL*w�\*�\����?X8?X8��Q�@���Q
G��\W��L=@��^
7��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@�		��\	@X8	@@X8���@�0	�@h9	Ah8'��Y�*��
	��	�Y	g�	�Y'��Y�*��G��Y7��Y�h\� ��		�@h9���Y��Y� ��7h\h\�Q� ��Wh\'��Y��Y�@���	�Q��Q�
�Q����	�h\�h\���Y���	�?X8
@X8	��/0ر"�@���/0���/0�	��/0��"���
��/0�
��/0��
�Y����@X8�?Y8�Y\� ��'4�����Y�h\� �@��?Y8�h\�
�Y�"��
�Y�Y\��Y�!�@�
'��Y�Y\7��Y�"�@�
g��Yw��YG��Y�@���?X8W��Y
�h\� �@�h\G�/0�G�/0���@�	G�/0����Y��/0��"�@�
G�/0���/0�	��/0ر$��

��/0���Y'��L����Y\�)8'��8� ��	w�['(874��� �@�h\h\
�h\�"�@�'�Y7��Yw�[� ��4��w��Y��Y�&��w� [w�	([�	�Y� ��'�[���YG��Y� ��	� �\�h\'�[� ���)8W��Yg�[�"��G��Yg�	�Y0[�`��g�[W�Y��	�Y�&�`�g�[���Yg�[�@���0[
��	�Y��Y� �@�g0[w�\�)8�@��
�
�Y�w��\@��	���\

�w
	���\���@�'\���K������
�6��� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����g2@\6@�7��L�����W���[2@�����0��������6�����6�/@������70AL�!��P,@������70AL�!��P(@�������m[	��m[���\� ���������2	���2����	�G\ ��G�P	�0Y\
�������
��Q7�\
��Y�������Y
	���Y

��Y����	���Y���8
��Y����W\w�8	G\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��G���[��k[	�\� ����K[!��P7(\����(8��8��<����7G\G\gG\���� ��'��� �g�\ ���G�H�' ����G�H �����W�P �GX\��� ����@��P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�S`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([���G	�Y�h\4��� ��g�	�Y�)8	w�[� �@����\
�?Y8'��Y� �����Yw�[w�[� ��w�[
�Y\��Y�A�����YG�0[w0[� ��
�Y\7�\

��Y������\�)8

�w ��
	���\'\����K�@���
� ������
w�8
����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P
�G\G�P����?�0Y\	���	�Q����7�\���Y	���Y�������Y	w��Y���Y� ����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P7(\(8������8��<7G\���G\gG\ ����' ����g�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�i`�w��\��D<*�L*G�LG�P���G�\��Qw��Y����
W��Y���Yw�Y����?@�@�a`�w��\� ��	�W YL*g�\*�\����?X8?X8��Q�@���Q
G��\W��L=@��^
'��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@�		��\	@X8	@@X8���@�	�@h9	Ah8'�
�Y�*�@�	7�Y	'�Y		�@h9�*����
�Y'�
�YGh\�(�����Y��YG�
�Y� ���h\7h\
�Q�(�����Y��Y	�?X8� �@���Q'h\�	�Q� �`�	�Q�h\�h\�"�@���Y	��/0���/0ر"�@���/0�	��/0�
@X8�"���
��/0�
��/0����Y� ���?Y8�Y\74���"���
�Y�?Y8�h\�`�@����Y�h\�Y\�b��
�Y��Y�Y\�a��
'��Y7��YG��Y��@��?X8
g��YG�/0ر"��G�/0�w��Y@X8�@�@�W��Y
�h\h\�"�@����Y	G�/0���/0ر"�@���/0�
G�/0�	��/0ر$��

��/0���Y�Y\� �@�'��L�)8g4���"��'h\7h\'@�8�"����Y�	�Yg�[� ��G4��	��Y'(8� ���g�([g�[g� [����h\h\W �\� �@���[��[���Y� �@�4��g�Yw��Y� ��G��Y
�)8'0[� �@�W�Y��Y���Y� ��7��Y7�[7�[� �`�W��Y
��Y���Y�&��
7�[
7�[��0[�B��
���Y��Y
��0[�"��g�\�)8

�w�@���w
	���\W�\ ��	G��\'\���K�@���6 ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�G\G�P����?	�0Y\
���
��Q����7�\
��Y���Y����
	���Y

��Y	���Y� �����8
��YW\����w�8	G\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\G���[������k[	�\��K[����!��P7(\(8������8��<7G\���G\wG\ ����' ����w�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�Z`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����g��YW��Yw��Y����?@�@�R`�g��\����W��L*�\'��K1 ��_*��\?X8?X8� ���Q
7��\��Q�����W YL
G��\��\� �@�� X\�?X8@X8� ���@h9� X\�?X8�"�@�Ah8@X8@@X8� �@�(�@h9
�?X8g��Y����?X9�?Y8�� �� �� �?Y8���Y7�Y�,��(w�Yw�Yg�Y� �� W�Y
w�YW�Y����?X9�?X8�� ������ �
w�Y
�� ر"�@��� ��� �
� �����Gh\@X8�Q�"�@�� ��� �� ��"�@�� ��� �	�� ر"�@��� ��� ��� �� �@��h\wh\�h\� ���
�Q��Q�Q� ���4��Gh\�4���"��7
�Y�h\
'��L� ��'�	�Yw
�Y
�)8�����?Y8g4����Y�"���h\(8GY\� ��
'��Y
�	�8w�[��@�w�[GY\74��� ���
���Y��[
��[� �@�w0[���Y�h\� �����Y
�� [�
([� ��G	�Y�)8�h\� ��4��g�	�Y	w�[� �����\
�?Y8w�[�&��w�[w�['��Y� �����Y
�Y\G�0[� ��w0[��Y���Y� ��
�Y\7�\��\�����)8

��Y'\��@X

�w����K	���\� ��� ���� ��
w�8
����	w�8������h6	����� h6�������\@����0�������0�g2@\6@����7��L�W���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[��m[� �����\�������2����	���2	
�����G\G�P�0Y\� ��	���	�Q7�\�������Y	���Y���Y� ��	w��Y���Y��8�������YW\w�8����G\������l6�����@���i6�@������m6 ����m7���� ����Y� ����k[���� �� ���Y���Y����wH\G���[��k[�����\�K[!��P����7(\(8��8������<7G\G\���gG\ �����' �g�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@� �����LW����Q���?g��'��'�O�"�ğ'Nw0[7�O�GB��7�N�mK'�0[����cK�P���D<*�L*7�LG�P���G�\�Qw�Y����	'��Y���Yw��Y����?@�@�x`�w��\��D<*�L*G�LG�P���G�\�Qw�Y����	g��Y���Yw��Y����?@�@�p`�w��\����W��L*W�\'��K1 ��_*�\?X8?X8� ���Q
g��\�Q=@��^
7��\�?X9��/ ���@��?X9�?X8��/ ��@�@�@X8��/ �
��/ �� ���W YL
� X\� X\� �@���\
�?X8�?X8�@��
�?Y8	�?Y8@X8�"�@�@@X8�@h9Ah8� �@�
G�
�Y��
�Y��
�Y� ���@h9g�
�YG�
�Y� ��
�h\w��Yg��Y� �@���
�Y��/ �7h\� �� h\

�Qw	�Y�"�@���/ ���/ ���/ �� ��	�Y�	�Q	Gh\����?X8�Q		
�Q� ��g4��
Gh\�?Y8��@����Y
GY\G4��������YGY\���Y�"�@���O ���O ���O ر"�@�
��/ ���/ ���/ ر"�����O ���/ ��4�����
�h\w�Yg4��������Yg��Ygh\�`��G�Y74��
�h\� �@�
�h\'��Y
���Y�� ���Y��Y
��O ���@�@X87��Y��O ������O �W�Y��O ر��@���O ���O �	�
�Y�"�@���O ���O ��/ ر"�@��O ��/ ��O ز"�@��/ ��O ��/ رd���O ��4��
�h\���@�
��Y
7h\���Y� ��G4��'�	�Y
wh\� ��
�h\
�?Y8W��Y� ��G�	�Y'��L���Y� ����
�Yg�	�Y�)8�#�����Y'�
�Y�)8� ��gh\'(8�Y\� �@�'��8���Yw�Y� �����Y
'�['�[�"��	7Y\
W� [�[� ���		��YW�[�0[� ��W�[
�[��Y� ��'��YW�([�[�@��Ch8	���\���\���@��0[�[W�\�@���0[Ch8�)83���	G��\w�\g\� �@�_�6���K�6� ������ ��6 ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�w2@\6@����7��L�g���[2@����������0�����6�����6����
/@�70AL�!��P����,@�70AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����G\G�P	�0Y\� ��
���
��Q7�\����
��Y���Y
	���Y� ��

��Y	���Y���8����
��Yg\w�8����	G\	������l6�����@�	��i6�@�����	�m6 �	���m7���� �
��Y� ��	��k[���	� �� 
��Y
��Y�����H\G���[��k[����	�\��K[!��P����7(\(8��8�����<7G\G\���wG\ �����' �w�\���� �G�H�'��� �G�H �� ����W�P ����GX\ ����@�����P�P�P� �����LW����Q���?
g��'��'�O����'N
7�O'0[�G"��
7N�mK

W0[����
cK�P���D<*�L*7�LG�P���G�\��Q��Y������YW�Yg��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����W��Yw�Yg��Y����?@�@�S`�g��\����W��L*7�\'��K1 ��_*��\?X8?X8��D�Q�W YL
��\�`����Q
G��\��\�"���@h9� X\Ah8� ��@X8	@@X8@X8� �@�� X\�?X8�?X9� �@��?X8�� ����Y�"���?X8���Y�?Y8�"���?Y8���Yh\�"��
���Y�@h9g��Y�*��g��YW��Y@X8�B�@� ���Y
�?X9g��Y�"��
�� ��?X8�Q�"�@��� ��� �
�� ��"�@��� �� �w��Y�"�@�
� ��� �	� ��"@�� ��� ��� ر"�@�

�� ��� ��� �����h\�h\��Q�����h\�
�Q�Q� ���4��Gh\g4��� ��7�Y'��L�h\� ��'�	�YGh\�)8�����YG4��W�	�Y� ��(8��Y�?Y8� ��@�8��	�Y	
�[��@�
�[�Y\'4��� ��'h\��[
G�0[�B��	
�)8�Y\
���Y� ��
G�[
G�[��([�"�@���Yw	�Y�?Y8� � ��	�Yg��Y��[�"���h\�� [Y\� ��

�0[w�Y��Y� ���Y\	G�[	G�[� ��� 	�\��Y��\�@���)8		�0[g��Y�@�H	��\Ch8	��\���@�'\����K���@���6� ������w�8����� ��	w�8��h6	�������� h6���\@��������0���0�g2@\����6@�7��L�W�<�[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ����m[���\�����������2	���2	�@�P�G\G�P����?�0Y\	���	�Q����'�\���Y���Y����	w�Y	���Y��Y� �����8���YW\����w�8G\����������l6�@���i6����@��m6 �������m7 ��������Y��k[���� �� ���Y�@�����YwH\G���[������k[�\�K[����!��P'(\(8������8��<'G\���G\gG\ ����' ����g�\ �GH����' �GH�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O����'N7�OW0[�G"��7�N�mKG�0[����cK�P���D<*�L*7�LG�P���G�\�QW�Y����'��YW��Yw��Y����?@�@�k`�w��\��D<*�L*G�LG�P���G�\��Qw��Y����
W��Y���Yw�Y����?@�@�c`�w��\� ��	�W YL*g�\*�\����?X8?X8��Q�@���Q
���\W��L=@��^
'��\�?X9��/0�����?X9
��/0�'��K����� X\� X\�?X8�"���?X8�?Y8�?Y8� �@�		��\	@X8	@@X8���@�	�@h9	Ah8	�Y�*�@�	���Y	'��Y		�@h9�(�@�	�Yw	�YGh\�(�����Y���YW	�Y� ��7h\Wh\
�Q�(�����Y���Yh\�����	�Q�
�Q�Q����	�h\�h\���Y���
�?X8	@X8
��/0ر"�@���/0���/0�
��/0��"���	��/0�	��/0��
�Y� �@�'4��h\�?Y8� �@�h\
�
�Y���Y�"�����YY\���Y�@���?Y8�?X8G�Y�"��Y\W
�Y�Y\�"�@�g
�Yw�
�Y'�Y�@��@X87��YGh\���@�Wh\�/0����Y�"�@�w�/0��/0�
�/0��"�@�	�/0�w�/0�
w�/0ر$��	w�/0���Y'��L���?�Y\�)8
�)8� ��74��Gh\Wh\�"�@�	�h\�
�Y��
�Y���
�h\'(84��� �@�	'��Y��Y�
�Y� ��7��Y'@�8W�[� �@�	���Y�Y�Y� ����YW�[g�[�!�@�	g��Y���YG�
�Y� ��w�YW�0[g([� ��g�[
g� [��[�$����[
��[	���Y� �@���
�Y
��[w��\�@����0[		Ch8Ch8��		���\
��0[	���\����7�\�)8G�\� ��?'\���K	�6� ���6����@���6 ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@�7��L�W���[� ��2@�����0�����6���������6�/@�70AL�����!��P,@�70AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�G\G�P����?	�0Y\
���
��Q����7�\
��Y���Y����
	���Y

��Y	���Y� �����8
��YW\����w�8	G\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\G���[������k[	�\��K[����!��P7(\(8������8��<7G\���G\wG\ ����' ����w�\ �G�H����' �G�H�@�D ���W�P��� �GX\ �����@��P�P����P�P�P� �����LW����Q���?
g��'��'�O����'N
7�O'0[�G"��
7N�mK

W0[����
cK�P���D<*�L*7�LG�P���G�\��Q��Y������YW�Yg��Y����?@�@�[`�g��\��D<*�L*G�LG�P���G�\��Qg��Y����W��Yw�Yg��Y����?@�@�S`�g��\����W��L*7�\'��K1 ��_*��\?X8?X8��D�Q�W YL
��\�`����Q
G��\��\�"���@h9� X\Ah8� ��@X8	@@X8@X8� �@�� X\�?X8�?X9� �@��?X8�� ����Y�"���?X8���Y�?Y8�"���?Y8���Yh\�"��
���Y�@h9g��Y�*��g��YW��Y@X8�B�@� ���Y
�?X9g��Y�"��
�� ��?X8�Q�"�@��� ��� �
�� ��"�@��� �� �w��Y�"�@�
� ��� �	� ��"@�� ��� ��� ر"�@�

�� ��� ��� �����h\�h\��Q�����h\�
�Q�Q� ���4��Gh\g4��� ��7�Y'��L�h\� ��'�	�YGh\�)8�����YG4��W�	�Y� ��(8��Y�?Y8� ��@�8��	�Y	
�[��@�
�[�Y\'4��� ��'h\��[
G�0[�B��	
�)8�Y\
���Y� ��
G�[
G��\W��\���D@E`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@<`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@.`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@%`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8��8����(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\�����)8
�\
��6�����\��L

�� ������ ��		����`�����\���\� ��'�8'��8
�L�`����L'(8'(8�@��
�L�6 ��� ���6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!����
������G�\'��8����'(8��6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�H`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�?`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�1`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��?@�(`�7��YG��Y�@�@�
7��Y
��/ �G��Y�"�@�	
��/ ���/ �
��/ ر"�@���O �
��O ���O ر��
��O �'��L�)8� ����8(8�)8� ��'�['�[�[� ���[
�[�[� ��'� [�['�	([� ���[
��0[w�
0[����'��\�0[
��\���'�\�)8�\� ��@�6'\4��� ��

�		��� ������ ������\�� ��W�\'��8��L� ��'�8'(8	�L�`����L'(8�6� ���L ���6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!����
������G�\'��8����'(8��6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�D`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�;`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�-`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�$`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8� ��?'(8'��8�)8� ���[�['�[� ���
'�['� [
��[����'([G�	0[
���\� ��G4��		����� ����	��\��[� ����[��[��� ��������� ���0[�0[���@��G�\	'��8	���\�"��'�\�)8'��8� ��'(8�\���K� ��'(8���6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y
7��Y���G��Y	G��Y�� ر"�@�
�� ��� �
�� ����'��L�)8
(8������8�)8��[� ��?��[
'�['�[� ��'� [	��[
��[� ����[
'�([��[� ����0[	�0[
� �\� ����0[���\w�\� ���)8'\����K�!������������g��\��'��8�@���'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@A`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@8`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@*`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@!`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8'��8����'(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\���K�!��

������� ����		�����@��������\�@�����\'�8'��8� ��'(8'(8��� ��6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y
7��Y���G��Y	G��Y�� ر"�@�
�� ��� �
�� ����'��L�)8
(8������8�)8��[� ��?��[
'�['�[� ��'� [	��[
��[� ����[
'�([��[� ����0[	�0[
� �\� ����0[���\w�\� ���)8'\����K�!������������g��\��'��8�@���'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@E`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@<`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@.`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@%`�7��Y
7��Y���G��YG��Y
7�/0��"�@�
7�/0���/0�
��/0����'��L�)8'��8����'(8�)8'�[� ��'�[�[�[� ��
�[�['� [� ���['�	([�[�@��
��0[w�
0[7 	�\� ���0[
7�\'�\�����)87\

��� ��������� ��		�������@������\���\����'�8'��8'(8�@��'(8����� ���(8�(8���K� ��7G\WG\�� ��6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@>`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@5`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@'`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� ����'��L�)8��8����(8�)8
'�[� ����[��[��[� ����['�['� [� ��
'�([0[�0[� ����[��[
���\� ���0[g��\��� ����		��

�����@�	7�\'�\'�8� ���)8'(8W\���@�������K�(8�@����G\ ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@F`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@=`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@/`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@&`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8����'��8'(8'�[� ��?�[�[�[� ���[
'�['� [� ��'�	([�)8

0[� ���0[� 	�\
�[� ��4��		��� ������ ���	��\�[� � ���G�\� �@��0['�\�)8� ��		'��8��\'��8� ��		'(8g\'(8�������K		�6�6�@����6 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!����
������G�\'��8����'(8��6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�B`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�9`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�+`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�"`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8'��8����'(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\���K�!��

���� ���		���@������\�@�����\'�8'��8����'(8'(8�6�����6��6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!����
������G�\'��8����'(8��6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@F`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@=`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@/`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@&`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8����'��8'(8'�[� ��?�[�[�[� ���[
'�['� [� ��'�	([�)8

0[� ���0[� 	�\
�[� ��4��		��� ������ ���	��\�[� � ���G�\� �@��0['�\�)8� ��		'��8��\'��8� ��		'(8g\'(8�������K		�6�6�@����6 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!����
������G�\'��8����'(8��6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@F`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@=`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@/`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@&`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8����'��8'(8'�[� ��?�[�[�[� ���[
'�['� [� ��'�	([�)8

0[� ���0[� 	�\
�[� ��4��		����� ��������� ����	��\�[� � �����G�\� �@��0['�\�)8� ��		'��8��\'��8� ��		'(8g\'(8�������K		���@����6 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!������
��������G�\'��8����'(8����� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�B`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�9`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�+`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�"`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8'��8����'(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\���K�!��

������� ����		�����@��������\�@�����\'�8'��8����'(8'(8��������6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�`�7��Y7��Y���G��Y	G��YW� ر"�@�W� �
�� ��� ����'��L�)8
(8� ����8��[��[� ��?
'�[�)8'�[� ��'� ['�([	��[� ����[��[��[� ��W �\'�0[	�0[� ����0[���\w�\� ���)8'\����K�!������
��������G�\'��8����'(8����� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@A`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@8`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@*`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@!`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8'��8����'(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\���K�!��

������� ����		�����@��������\�@�����\'�8'��8� ��'(8'(8��� ��6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@<`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@3`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@%`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y
7��Y���G��Y	G��Y�� ر"�@�
�� ��� �
�� ����'��L�)8
(8������8�)8��[� ��?��[
'�['�[� ��'� [	��[
��[� ����[
'�([��[� ����0[	�0[
� �\� ����0[���\w�\� ���)8'\����K�!������������g��\��'��8�@���'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@G`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@>`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@0`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@'`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8����?'@�8'(8'�[� ��
w�[�[�[� ��'([�)8
'�[����'� [G�	0[
� �\� ��74��		��������	��\� � �w�[��� ����0[
w�[� ��w�[G�\��@��		'��8w�0['��8�B��'�\�)8���\����		'(8'(8W\� ��		�6�6���K� ��		����� ���6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@�����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@=`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@4`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@&`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y7��Y���G��YG��YW� ر"6@�
W� �	g� �
g� ����'��L�)8��8����
(8�)8'�[� ����[��[��[� ����['�[
'� [� ��'�([��[��[�@��0[w�0[���\� ���0[W�\�� ���		�

�� �@�	7�\'�\�)8����'�8W\'(8� ������K�6��� ��� ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@D`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@;`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@-`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@$`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8'��8����'(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\���K�!��

���� ���		���@������\�@�����\'�8'��8����'(8'(8�6�@���6���@����6 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@=`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@4`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@&`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y7��Y���G��YG��YW� ر"6@�
W� �	g� �
g� ����'��L�)8��8����
(8�)8'�[� ����[��[��[� ����['�[
'� [� ��'�([��[��[�@��0[w�0[���\� ���0[W�\�� ���		�

�� �@�	7�\'�\�)8����'�8W\'(8� ������K�6��� ��� ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@G`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@>`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@0`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@'`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8����?'@�8'(8'�[� ��
w�[�[�[� ��'([�)8
'�[����'� [G�	0[
� �\� ��74��		��������	��\� � �w�[��� ����0[
w�[� ��w�[G�\��@��		'��8w�0['��8�B��'�\�)8���\����		'(8'(8W\� ��		�6�6���K� ��		����� ���6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@�����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@=`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@4`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@&`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@`�7��Y7��Y���G��YG��YW� ر"6@�
W� �	g� �
g� ����'��L�)8��8����
(8�)8'�[� ����[��[��[� ����['�[
'� [� ��'�([��[��[�@��0[w�0[���\� ���0[W�\�� ���		�

�� �@�	7�\'�\�)8����'�8W\'(8� ������K�6��� ��� ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@>`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@5`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@'`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y
7��Y���G��YG��Y	��/ ر"�@�
��/ �
��/ �
��/ ر"�@���O �
��O ���O ؿ$��
��O �'�N'�N�"�@�'�O'�N'�?N����'�)N�0['�	�\����?�\�)8
\�`����L4��		��� ����
����� ��������� �����\	
�L��L� � �g��\��'�8� ��
�L'��8�(8�@�� ���(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!������		��� ��

��	7�\�L�@��'�8�(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�:`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�1`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�#`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@�`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0�� �@�'�N'�O'�N�b�@�'�N'�?N�0[� ��'�)N�\'�	�\� ���)8\��L�!��

������� ����		����� ��������\�"���L��L���\� ��'�8�L'��8� ���(8 ���(8��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!������		��� ��

��	7�\�L�@��'�8�(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�:`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�1`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�#`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@�`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0�� �@�'�N'�O'�N�b�@�'�N'�?N�0[� ��'�)N�\'�	�\� ���)8\��L�!��

������� ����		����� ��������\�"���L��L���\� ��'�8�L'��8� ���(8 ���(8��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!������		��� ��

��	7�\�L�@��'�8�(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@>`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@5`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@'`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y
7��Y���G��YG��Y	��/ ر"�@�
��/ �
��/ �
��/ ر"�@���O �
��O ���O ؿ$��
��O �'�N'�N�"�@�'�O'�N'�?N����'�)N�0['�	�\����?�\�)8
\�`����L4��		�� ���
��� ������ �����\	
�L��L� � �g��\�'�8� ��
�L'��8'(8�@�� ��'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!����		�� ��

�	7�\�L�@��'�8'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�:`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�1`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�#`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@�`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0�� �@�'�N'�O'�N�b�@�'�N'�?N�0[� ��'�)N�\'�	�\� ���)8\��L�!��

���� ���		��� ������\�"���L��L���\� ��'�8�L'��8� ��'(8 ��'(8��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!����		�� ��

�	7�\�L�@��'�8'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@>`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@5`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@'`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y
7��Y���G��YG��Y	��/ ر"�@�
��/ �
��/ �
��/ ر"�@���O �
��O ���O ؿ$��
��O �'�N'�N�"�@�'�O'�N'�?N����'�)N�0['�	�\����?�\�)8
\�`����L4��		�� ���
��� ������ �����\	
�L��L� � �g��\�'�8� ��
�L'��8'(8�@�� ��'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!����		�� ��

�	7�\�L�@��'�8'(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�D`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�;`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�-`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�$`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8� ��?(8��8�)8� ���[�['�[� ���
'�['� [
��[����'([G�	0[
���\� ��G4��		����� ����	��\��[� ����[��[��� ��������� ���0[�0[���@��G�\	'��8	���\�"��'�\�)8'��8� ���(8�\����K� ���(8������6��� ���	w�8����	����
w�8��h6��������� h6���\� ��@�
���0
���0������2@\6@���L�����ǀ<�[2@�
����0��������6�
����6�/@������
0AL�!��P,@������0AL�!��P(@�������m[��m[���\� ������

���2	���2����	
	�

�\ ��
G�P
�0Y\���������Q
��\�Y����
��Y�Y��Y�������8�Y	��Y�����\
	w�8
�\����
����
��l6@�������i6�@��m6���� ����m7		� �� �
��Y��k[����

���

 ����
��Y��Y
�H\� ��
����[
��k[
�\� ��

��	K[!��P
�(\����
(8��8�<�����G\�\	�G\���� �				�'��� �	��\ ���	'H		�' ����	'H �	����		W�P �	
�X\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!������		��� ��

��	7�\�L�@��'�8�(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@A`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@8`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@*`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@!`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8��8����(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\����K�!��

������� ����		�����@��������\�@�����\'�8'��8� ���(8�(8��� �����6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!������		��� ��

��	7�\�L�@��'�8�(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@A`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@8`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@*`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@!`�7��Y
7��Y���G��YG��Y
7�/0ر"@�
7�/0���/0�
��/0����'��L�)8��8����(8�)8'�[� ��'�[�[�[� ��_'� [�[�[� ���[
'�	([�[�@��w�
0[�0[

' 
�\� ���0[
���\'�\� ���)8
�\����K�!��

������� ����		�����@��������\�@�����\'�8'��8� ���(8�(8��� �����6 ���� ��	w�8	����
w�8������h6����� h6�������\@�
���0����
���0��2@\6@������L�ǀ<�[2@������
����0�����6�
����6����
/@�
0AL�!��P����,@�0AL�!��P� ��(@���m[��m[� �����\����

���2����	���2	
	�����

�\
G�P
�0Y\� �������Q
��\�����Y
��Y�Y������Y���8�Y����	��Y�\
	w�8����
�\
����
��l6�����@���i6�@������m6 ����m7����		 �
��Y� ����k[

���
� ��
 
��Y��Y����
�H\
����[
��k[����
�\

��	K[!��P����
�(\
(8��8�����<�G\�\���	�G\ �		���		�' �	��\���� �	'H		�'��� �	'H �� ��	��		W�P ����	
�X\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@4`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@+`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ���@`�7��Y7��Y���G��YG��YW� ر"�@�W� �	g� �
g� �� �@�
'�N'�N'�O�B�@�'�N'�?N
'�)N�@����0[

���\�\� ���)8
\��L�!������		��� ��

��	7�\�L�@��'�8�(8 ������	w�8	����� ��
w�8��h6�������� h6���\@�����
���0
���0��2@\����6@���L�ǀ<�[� ��2@�
����0�����6�����
����6�/@�
0AL�����!��P,@�0AL�����!��P(@���m[� ����m[���\��������

���2	���2	�@�P
	�

�\
G�P����?
�0Y\�����Q����
��\�Y
��Y� ���Y��Y���8� ���Y	��Y�\����
	w�8
�\
��������
��l6�@���i6����@��m6 �������m7		 �����
��Y��k[

���� ��

 
��Y�@����Y
�H\
����[����
��k[
�\

��	K[����!��P
�(\
(8������8�<�G\����\	�G\ ���				�' ����	��\ �	'H���		�' �	'H�@�D �	��		W�P��� �	
�X\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y� ���@�
G��\W��\���D@�D`����\*�L= ��_*G�LG�Pg�\�����Qw�YG��Y���	w��Y���Y@�����
W��\g��\@�;`��D���\�?X9*�\� ���?X9?l8?l8��P?X8?X8G�P���A���\��Qg��Y���*'�\
W��Y��Y���?X8��Y@�����
���\���\@�-`�@�@P���\?X8	G�P�����\
��Q
	���Y����	���Y���Y	
���Y� ��@�
w��\���\� ��@�$`�
7��Y7��Y���G��Y
G��Y	
��/ ر"�@���/ �
��/ ���/ ر"@�
��O ���O �
��O ر����O �'��L�)8� ��?(8��8�)8� ���[�['�[� ���
'�['� [
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@ `�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@`�w��\� ��'��L*'�\*�\����?X8?X8gh\���7h\W�/0��)8� ��(8��8�)8� ��w�[w�['�[� ��	��[
��[��[� ����['�['� [� ��
'�([0[��0[�`���0[
g �\7�\�����)8g��\'\�������K����6��� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@���L�����g�<�[2@�����0��������6�����6�/@������0AL�!��P,@������0AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���'H�' ����'H �����W�P �WX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@� `�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\����'��L*'�\�)81 ��_*�\?X8?X8�@��gh\7h\W�/ ؿ$��W�O �	(8��8� ���)8��[
��[� ��	'�[
'�[��[�@����[��['�([� ��	��['� [0[� ���0[��0[���\�"��7�\W��\�)8���@�'\����K��!�����6 ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�w2@\6@������L�g�<�[2@����������0�����6�����6����
/@�0AL�!��P����,@�0AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����W\G�P	�0Y\� ��
���
��QG�\����
��Y
���Y	��Y� ��

��Y	��Y���8����
��Yg\w�8����	W\	������l6�����@�	��i6�@�����	�m6 �	���m7���� �
��Y� ��	��k[���	� �� 
��Y
��Y�����H\W���[��k[����	�\��K[!��P����G(\(8��8�����<GG\W\���wG\ �����' �w�\���� �'H�'��� �'H �� ����W�P ����WX\ ����@�����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��	'�O*'�\'�N= ��_*�\?X8?X8�@��gh\7h\W�/ ؿ$��W�O �'�N
'�N�b�@�'�?N')N	�0[� ��
���\	�\�)8���@�?
\��L�L�����L�L�(8�@���(8 �� ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L�(8��� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@���L�����g�<�[2@�����0��������6�����6�/@������0AL�!��P,@������0AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���'H�' ����'H �����W�P �WX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*'�\'�O= ��_*�\?X8?X8�@��gh\7h\W�/0�� �@�'�N'�N	'�?N�b��'�)N��0[g��\�����\�)8\�"�@���L�L��L�@� ��L�(8�(8� �� �� ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�w2@\6@������L�g�<�[2@����������0�����6�����6����
/@�0AL�!��P����,@�0AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����W\G�P	�0Y\� ��
���
��QG�\����
��Y
���Y	��Y� ��

��Y	��Y���8����
��Yg\w�8����	W\	������l6�����@�	��i6�@�����	�m6 �	���m7���� �
��Y� ��	��k[���	� �� 
��Y
��Y�����H\W���[��k[����	�\��K[!��P����G(\(8��8�����<GG\W\���wG\ �����' �w�\���� �'H�'��� �'H �� ����W�P ����WX\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L�(8��� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@���L�����g�<�[2@�����0��������6�����6�/@������0AL�!��P,@������0AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���'H�' ����'H �����W�P �WX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*'�\'�O= ��_*�\?X8?X8�@��gh\7h\W�/0�� �@�'�N'�N	'�?N�b��'�)N��0[g��\�����\�)8\�"�@���L�L��L�@� ��L�(8�(8� �� �� ���� ��w�8	����
w�8����	��h6
����� h6�������\@����0�������0�w2@\6@������L�g�<�[2@����������0�����6�����6����
/@�0AL�!��P����,@�0AL�!��P� ��(@���m[	��m[� �����\�������2����	���2	�����W\G�P	�0Y\� ��
���
��QG�\����
��Y
���Y	��Y� ��

��Y	��Y���8����
��Yg\w�8����	W\	������l6�����@�	��i6�@�����	�m6 �	���m7���� �
��Y� ��	��k[���	� �� 
��Y
��Y�����H\W���[��k[����	�\��K[!��P����G(\(8��8�����<GG\W\���wG\ �����' �w�\���� �'H�'��� �'H �� ����W�P ����WX\ ����@� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L�(8��� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@���L�����g�<�[2@�����0��������6�����6�/@������0AL�!��P,@������0AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���'H�' ����'H �����W�P �WX\��� ����@��P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@`�w��\�"��	'�O*'�\'�N= ��_*�\?X8?X8�@��gh\7h\W�/ ؿ$��W�O �'�N
'�N�b�@�'�?N')N	�0[� ��
���\	�\�)8���@�?
\��L�L�����L�L ����� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@���L�����g�<�[2@�����0��������6�����6�/@������0AL�!��P,@������0AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���'H�' ����'H �����W�P �WX\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*'�\'�O= ��_*�\?X8?X8�@��gh\7h\W�/0�� �@�'�N'�N	'�?N�b��'�)N��0[g��\�����\�)8\�"�@���L�L��L� � ��L �� ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@`�w��\�"��	'�O*'�\'�N= ��_*�\?X8?X8�@��gh\7h\W�/ ؿ$��W�O �'�N
'�N�b�@�'�?N')N	�0[� ��
���\	�\�)8���@�?
\��L�L�����L�L ����� ���w�8����	����
w�8	��h6����
����� h6���\� ��@����0���0�����w2@\6@���L�����g�<�[2@�����0��������6�����6�/@������0AL�!��P,@������0AL�!��P(@�������m[	��m[���\� ���������2	���2����	�W\ ��G�P	�0Y\
�������
��QG�\
��Y����
���Y	��Y

��Y����	��Y���8
��Y����g\w�8	W\����	������l6@�����	��i6�@�	�m6���� �	���m7� �� �
��Y	��k[�������	 ����
��Y
��Y�H\� ��W���[��k[	�\� ����K[!��PG(\����(8��8�<����GG\W\wG\���� ��'��� �w�\ ���'H�' ����'H �����W�P �WX\��� ����@��P� �����LW����Q���?g��'��'�O�"�ğ'�NW�0[7�O�GB��7N�mK70[����cK�P���D<*�L*7�LG�P���W�\��Qg��Y����7��Yg�Y��Y����?@�@�`�w��\��D<*�L*G�LG�P���W�\�Qw�Y����w��Yg�Y��Y����?@�@�`�w��\�"��'�N*�\'�O= ��_*'�\?X8?X8�@��7h\gh\W� �� �@�'�N'�N	'�?N�b��')Nw0[g��\�����\�)8\�����L�L ������w�8	����� ��
w�8	��h6
�������� h6���\@��������0���0�w2@\����6@���L�g�<�[� ��2@�����0�����6���������6�/@�0AL�����!��P,@�0AL�����!��P(@���m[� ��	��m[���\�����������2	���2	�@�P�W\G�P����?	�0Y\
���
��Q����G�\
��Y
���Y����	��Y

��Y	��Y� �����8
��Yg\����w�8	W\	����������l6�@�	��i6����@�	�m6 ����	���m7 �����
��Y	��k[���� ��	 
��Y�@��
��Y�H\W���[������k[	�\��K[����!��PG(\(8������8�<GG\���W\wG\ ����' ����w�\ �'H����' �'H�@�D ���W�P��� �WX\ �����@��P�P����P�P�P@�\)]��(���)p��p;Wp@(��)p@4���p@����p@����p@���p@����p@��xp@�	��yp@x���p@@��
p@��Cp@���lp@�$���
p@`)���p@(.��p@�5��2p@�:��[p@�?��{p@HD���p@L���p@�P��p@�X��=p@h]��Tp@0b��_p@�f��vp@�k���p@�p���p@Px���p@}���p@���� p@�����!p@p����"p@8���!$p@���E%p@ȟ��\&p@����g'p@X���~(p@ ����)p@����*p@�����+p@x����,p@@����-p@����.p@�����/p@����01p@`���Z2p@(���w3p@�����4p@�����5p@�����6p@H����7p@����8p@����9p@���;p@h��*<p@0��B=p@���k>p@����?p@�$���@p@P)���Ap@.���Bp@�2���Cp@�7���Dp@p?���Ep@8D���Fp@I���Gp@�M���Hp@�U���Ip@XZ���Jp@ b��"Lp@�f��?Mp@�k��PNp@xp��mOp@@u�Pp@z��Qp@Ё��Rp@����Sp@`���Tp@(���Up@��Wp@���Wp@ĝ$�Xp@�	�Yp@�$
�Zp@�_[p@$�$R\p@H�
$]p@T�$%^p@x�$x_p@��$�`p@��$�ap@�$cp@�$Mdp@,�$wep@P�$�fp@t�$�gp@��$ip@��$'jp@�$bkp@�$�lp@(�$�mp@L�$�np@p�$pp@��$qp@��$"rp@ܸ$ .sp@�$!Ktp@$�$"Yup@H�$#dvp@l�$$fwp@��$%�xp@��$&�yp@�$'�zp@��$(�{p@ �$)}p@D�$*~p@h�$+*p@��$,6�p@��$-S�p@��$.a�p@��$/l�p@�$0n�p@@�$1��p@d�$2��p@��$3܇p@��$4�p@��$5#�p@��$64�p@�$7Q�p@<�$8c�p@`�$9��p@��$:��p@��$;��p@��$<��p@��$=֒p@�$>�p@8�$?�p@\�$@/�p@��$A:�p@��$B9�p@��$CD�p@��$DD�p@�$EU�p@4�$FW�p@X�$GV�p@|�$HL�p@��$I]�p@��$Ji�p@��$K��p@�$L΢p@0�$M�p@T�$N��p@x�$O�p@��$P+�p@��$QN�p@��$Rb�p@�$Ss�p@,�$T{�p@P�$U��p@t�$V��p@��W��p@��$X��p@�YW�p@�$ZP�p@��['�p@�$\ �p@(�]��p@4�$^��p@X$_W�p@|$`��p@�$aڸp@�$b	�p@�$cD�p@$dt�p@0$e��p@T$f�p@x	$g�p@�
$h<�p@�$i}�p@�$j��p@$k��p@,$l�p@P$m3�p@t$nD�p@�$oa�p@�$ps�p@�$q��p@$r��p@($s��p@L$t��p@p$u��p@�$v�p@�$w<�p@�$xf�p@$y��p@$$z��p@H ${��p@l!$|��p@�"$}��p@�#$~��p@�$$�p@�%$��p@ '$�6�p@D($�R�p@h)$���p@�*$���p@�+$���p@�,$���p@�-$� �p@/$�8�p@@0$�a�p@d1$�{�p@�2$���p@�3$���p@�4$���p@�5$���p@7$��p@<8$�4�p@`9$�E�p@�:$�J�p@�;$�[�p@�<$�a�p@�=$�x�p@?$���p@8@$���p@\A$���p@�B$���p@�C$���p@�D$��p@�E$��p@G$�>�p@4H$�U�p@XI$�x�p@|J$���p@�K$��p@�L$��p@�M$��p@O$��p@0P$�!p@TQ$�7p@xR�	p@�S$��p@�T��p@�U$��	p@�V��
p@�W$��p@Y�Xp@Z$�Y
p@8[$��p@\\$��p@�]$�#p@�^$�Lp@�_$��p@�`$��p@b$��p@4c$�p@Xd$�;p@|e$�[p@�f$��p@�g$��p@�h$��p@j$�p@0k$�4p@Tl$�? p@xm$�V!p@�n$�b"p@�o$�#p@�p$��$p@r$��%p@,s$��&p@Pt$��'p@tu$��(p@�v$�*p@�w$�%+p@�x$�<,p@z$�G-p@({$�^.p@L|$�j/p@p}$��0p@�~$��1p@�$��2p@܀$��3p@�$��4p@$�$��5p@H�$�7p@l�$�:8p@��$�W9p@��$�h:p@؈$��;p@��$��<p@ �$��=p@D�$��>p@h�$��?p@��$��@p@��$�
Bp@Ԑ$�"Cp@��$�KDp@�$�cEp@@�$�nFp@d�$�mGp@��$�xHp@��$�xIp@И$��Jp@�$��Kp@�$��Lp@<�$��Mp@`�$��Np@��$��Op@��$��Pp@̠$�Rp@�$�Sp@�$�0Tp@8�$�MUp@\�$�_Vp@��$��Wp@��$��Xp@Ȩ$��Yp@�$��Zp@�$��[p@4�$��\pX� �\px��B����uB ���B0���nBȰ��:B`��dBp���3B����B���)B�����BH����B���B����B���"B����4	B0��l	B@���r
Bؽ��
B����B����B�����B(��
B8����
B���2B����1Bx��hB����fB ���B0����B����B�����Bp���B�����B��B(���B���JB����HBh��~Bx���wB���B ����B����B�����B`���Bp����B���B����B����B�����BX��Bh����B��$B���B���2B���� BP��4 B`���!B���H!B���-"B���\"B����P#BH���#BX���$B����$B����%B����%B�����&B@���&BP����'B����'B�����(B���)B�����)B8��*BH����*B���,+B����,B���:,B����-B0��<-B@���.B���P.B����5/B���d/B����[0B(���0B8����1B���1B����2Bx���2B�����3B ���3B0����4B��5B����5Bp��*6B����7B�H7B(��28B��b8B���F9Bh�v9Bx��Q:B�~:B ��e;B���;B����<B`��<Bp���=B
��=B
���>B���>B����?BX
�@Bh
���@B�
AB���AB��BB����BBP�CB`���CB�� DB���DB��(EB����EBH�*FBX���FB�� GB���GB��(HB���IB@�0IBP��#JB��ZJB���XKB���KB���~LB8!��LBH!���MB�"��MB�"���NB�$��NB�$���OB0&�OB@&��PB�'QB�'��QB�)*RB�)�SB(+>SB8+�TB�,FTB�,�-UBx.`UB�.�JVB 0zVB00�UWB�1�!XB`3KXBt3�YB5�	�YB�6
ZB�6�
�ZBP8��[B�9�[B�9��\B�;�
p]B,=�]B@=��^B�>�^B�>��_B�@`B�@�aB0BTaBDB�MbB�C�bB�C�|cB�E�cB�E��dB4G�dBHG��eB�HfB�H�gB�JFgB�J�>hB8LrhBLL�aiB�M�iB�M��jB�O�jB�O��kB<Q�kBPQ��lB�R*mB�R�#nB�TVnB�T�CoB@VtoBTV�TpB�W�pBX�eqB�Y�qB�Y�vrBD[ �rBX[� �sB�\!�sB]�!�tB�^"�tB�^�"�uBH`#�uB\`�#�vB�a$�vBb�$�wB�c%�wB�c�%�xBLe&yB`e�&�yB�f'2zBg�'+{B�h(^{B�h�(K|BPj)||Bdj�)\}B�k*�}Bl�*m~B�m+�~B�m�+~BTo,�Bho�,��Bq-ĀBq�-��B�r.؁B�r�.��BXt/�Blt�/��Bv0�Bv�0ʄB�w1��B�w�1�B\y2�Bpy�2�B{3>�B{�3<�B�|4p�B�|�4b�B`~5��Bt~�5y�B�6��B ��6��B��7‹B́�7��Bd�8֌Bx��8��B�9�B$��9ގB��:�BІ�:�Bh�;"�B|��;��B�<*�B(��<�B��=D�Bԋ�=.�Bl�>^�B���>N�B�?��B,��?q�BĐ@��Bؐ�@��Bp�A��B���A��B�B��B0��B��BȕC��Bܕ�C��Bt�D��B���D��B �E̚B4��E��B̚FԛB��F��Bx�G֜B���G��B$�H̝B8��H��BПIԞB��I��B|�JܟB���JϠB(�K�B<��K�BԤL8�B��L*�B��M\�B���MA�B,�Np�B@��NX�BةO��B��Oo�B��P��B���P��B0�Q��BD��Q��BܮR֨B��R��B��S�B���SŪB4�T�BH��T٫B�U�B��U��B��V&�B���V�B8��WծBиX�Bܸ�XԯBt��Y��B�ZаB��Z��B���[u�BH�\��BT��\t�B��]E�B��^p�B���^n�B(�_��B4��_��B��`��B���`�Bp�a<�B|��a:�B�bn�B ��bo�B��c��B���c��B\�dػBh��dܼB�e�B��e�B��fL�B���fI�BH�g~�BT��gr�B��h��B���h��B��i�B���i��B4�j�B@��j�B��kN�B���kL�B|�l��B���lr�B �m��B,��m��B��n��B���n��Bh�o��Bt��o��B�p��B��p��B��q�B���q��BT�r�B`��r�B��s2�B��s
�B��t:�B���t!�B@�uT�BL��u>�B��vn�B���vg�B��w��B���w��B,�x��B8��x��B��y��B���y��Bt�z�B���z��B�{"�B$��{�B��|6�B���|!�B`�}T�Bl��}>�B�~n�B��~R�B����B���]�BL����BX���q�B����B������B�����B������B8����BD�����B��*�B���!�B��T�B���>�B$�n�B0��[�B����B���x�Bl���Bx����B	���B	����B�
���B�
����BX��Bd����B�
��B���B��4�B���#�BD�T�BP��I�B��~�B���r�B����B�����B0���B<����B����B�����Bx���B�����B���B(����B����B�����Bd���Bp����B ��B ���B�!���B�!���BP#��B\#���B�$��B%���B�&�J�B�&��M�B<(���BH(��y�B�)���B�)����B�+��B�+����B(-��B4-���B�.��B�.���Bp0�$B|0��B2�DB 2��-B�3�^B�3��>B\5�lBh5��XB7��B7��{B�8��B�8���BH:��UB�;�B�;��NB�=��	B?�D	B(?��
B�@���
BXB�	BdB���B�C���B�E��B�E���
B8G�BDG��B�H�LB�H��RB�J��B�J���B$L��B0L���B�M��B�M���BlO�BxO��BQ�HBQ��FB�R�zB�R��rBXT��BdT���B�U��BV���B�W��B�W���BDY�*BPY��(B�Z�^B�Z��WB�\��B�\��wB0^��B<^���B�_��B�_��� Bxa�� B�a���!Bc��!B(c���"B�d��"B�d���#Bdf�$Bpf���$Bh�%Bh���%B�i�&B�i���&BPk�('B\k��
(B�l�<(Bm��0)B�n�f)B�n��_*B<p��*BHp��+B�q��+B�q���,B�s��,B�s���-B(u��-B4u���.B�v��.B�v���/Bpx��/B|x���0Bz�1B z���1B�{�2B�{���2B\}�3Bh}���3B�04B��5B���D5B����;6BH��r6BT���p7B���7B�����8B����8B�����9B4���9B@����:B؈��:B����;B|��
<B�����<B ��(=B,���>Bč�B>BЍ��&?Bh��V?Bt���1@B��^@B���EAB���xAB����bBBT���BB`����CB����CB����DB����DB�����EB@���EBL����FB���FB����GB����GB�����HB,���HB8����IBП�JBܟ���JBt��KB�����KB��
LB$����LB���MBȤ���MB`��NBl����NB��OB���PB���:PB����8QBL��lQBX���^RB���RB����uSB����SB�����TB8���TBD����UBܱ��UB����VB����VB�����WB$��
XB0����XBȶ�YBԶ���YBl��&ZBx���
[B��@[B���*\B���Z\B����2`�@� ����6 � �@� �`���6 ���@� � �@�6 �`	@� V�	@�6 F�+	�%�8 ��Q	��  �	 h	�(  �	��  >��	@�( v
�	��  ���	�%�8 �`�	��   
@�( 2`
��  L 1
�%�8 ��V
��  ��m
�%�H �`�
��  �
�( .�
��  6`�
�( P`�
�  X �%H x�,�  ��C( ��[�  � `r�%H �! ��  �"��%H �#���  %`�	( 6&`�
  >' ( X( 2�  `)�H�%
H �*`n�  �+ �( �, ��  �-��%H �.���  �/`�@%7 1�
  02�+
�( P3 C
  ^4 Y
�( ~5�p
  �6��
%7 �7��
  �8��
�( �9 �
  �: �
%7 <   = *�%H B>�O   \?�e!( j@�}"  fA�#( tB�$  pC��%%H �D��&  �E��'( �F�(  xG�+�%)H �H`Q*  �I`g@%+7 �J��,  �K���-( M �.  &N ��/( FO��0  TP��%17 zQ�"2  �R�8�3( �S P4  �T f%57 �U �6  �V �@7 �W`��
85 �X �@9 xY`��
:5 hZ �@; =[`��
<0 -\ �@= ]`��
>0 �] ��?  H_���
@  �`��A  �a���
B  �b`@C( "d�D  He�
E  �f�+F  �g�6G  �h�BH  �i�M
I  6k�ZJ  bl�e�K  �m r�
L  �n�|�M  �o���
N  �p`�O( �q`�P  s`��Q  $t �R  2u ��S  @v��T  <w���U  \x��V  jy���W  �z`�X  �{`�Y  �| Z  �} �[  �*�
\  ��5�]  ,�`B^  :�`M_  H�`Y`  D�`d�a  d� qb  r� |@c  ��`�@
d  ܈��@e  ���@
f  
� �@g  *�`�@
h  8���@i  ^���@
j  r� �@k  ��`�@
l  ����@m  ���@
n  “ �o  ��@
p  � �q  ��"@
r  � -�s   ��8@
t  � C�u  0��O@
v  2��Y�w  4��e@
x  $��o�y  8�`|@
z  :���@{  x��@
|  �� �@}  ģ`�@
~  Ҥ��@  ��@
�  � �@�  &�`�@
�  :���@�  N���@
�  P� �@�  v�`@
�  ���
@� d��

� Z��@� 4� 
� *� @� �`
� ��`)@� Գ�)
� ʴ�3�	� &�`=	� p�`F�	� ���O	� ڹ�X�	� ��b@	� D��k�	� ���u	� ���~�	� � �	� � ��	� P��	� ����	� ��`�	� ��`�@	� ���	� ����	� :� �@	� H�`��	� n� �	� �� ��	� ����	� ����	� ��`
	� ��`�	� � 	� <� &�	� \��/	� j��8@	� ���A	� ���J�	� ���T	� ���]�	� �� g	� �� p�	� ��y	� "��@	� f� ��� ����	� ������ �� �	� �� ��� ���@	� 8���� R�`�	� l�`��� t���@	� �� ��� �����	� �� ��� ���@	�  ���� "�`@	� 6���� 8� "�	� R��+�� Z� 4@	� b�`=�� X��E�	� r�`O�� z��W@	� �� a�� ��i	� ��r�� *� {	� P� ��� d���@	� ���� �`�	� �`��� ��@	� � ��� ��@� ��@� � �@� �`�@� �	��@� q
��@� a �@� 6`�@� &
���� | �@� �`�@� ���@� ��� V`@� |��� � @� �`�� �!@� , )�� j�0@� ��7@� � ?@� �`F@� �M@�  �T�� 0!`\@� 8"�c�� X# k@� f$`r@� t%�y@� p&��� �'`�@� �(��@� �)�@� �* �@� ,`�@� -��@� 8.�@� @/ ��� `0��@� n1���� |2`�@ x3��� �4 �@ �5`� �6`�� 8 �� 09�� >:�� ^;`	� l< �	 �=��
 �>�� �?`$� �@ +�
 �A�1� �B�8@ "D�?� <E�F@ JF�M� FG�T@ TH�[� PI�b@ dJ�i� fK�p@ hL�w� XM�~@ lN�� nO�� �P��� �Q`�� �R �� T�� &U���  4V`��! ZW ��" nX���# �Y���$ �Z`��% �[ ��& �����X0X0���
