BetterWuhuIsland/Assets/Editor/x64/Bakery/denoiseFinishSH.ptx

//
// Generated by NVIDIA NVVM Compiler
//
// Compiler Build ID: CL-23083092
// Cuda compilation tools, release 9.1, V9.1.85
// Based on LLVM 3.4svn
//
.version 6.1
.target sm_30
.address_size 64
// .globl _Z6oxMainv
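//
// Annotation (inferred, not part of the compiler output): this appears to be the
// Bakery "denoiseFinishSH" OptiX ray generation program. It loads one float4 per
// launch index from output_buffer, applies a pow(x, 2.2f) curve to the first three
// channels via the usual log2/exp2 expansion, and writes the result to image2
// (half4) or image3 (RGBA8) depending on the global 'mode'. The comments below are
// inferred from the float constants and call patterns, not from the CUDA source.
//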
.global .align 8 .b8 pixelID[8];
.global .align 8 .b8 resolution[8];
.global .align 1 .b8 output_buffer[1];
.global .align 1 .b8 image2[1];
.global .align 1 .b8 image3[1];
.global .align 4 .u32 mode;
.global .align 4 .b8 _ZN21rti_internal_typeinfo7pixelIDE[8] = {82, 97, 121, 0, 8, 0, 0, 0};
.global .align 4 .b8 _ZN21rti_internal_typeinfo10resolutionE[8] = {82, 97, 121, 0, 8, 0, 0, 0};
.global .align 4 .b8 _ZN21rti_internal_typeinfo4modeE[8] = {82, 97, 121, 0, 4, 0, 0, 0};
.global .align 8 .u64 _ZN21rti_internal_register20reg_bitness_detectorE;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail0E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail1E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail2E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail3E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail4E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail5E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail6E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail7E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail8E;
.global .align 8 .u64 _ZN21rti_internal_register24reg_exception_64_detail9E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail0E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail1E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail2E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail3E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail4E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail5E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail6E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail7E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail8E;
.global .align 4 .u32 _ZN21rti_internal_register21reg_exception_detail9E;
.global .align 4 .u32 _ZN21rti_internal_register14reg_rayIndex_xE;
.global .align 4 .u32 _ZN21rti_internal_register14reg_rayIndex_yE;
.global .align 4 .u32 _ZN21rti_internal_register14reg_rayIndex_zE;
.global .align 8 .b8 _ZN21rti_internal_typename7pixelIDE[6] = {117, 105, 110, 116, 50, 0};
.global .align 8 .b8 _ZN21rti_internal_typename10resolutionE[6] = {117, 105, 110, 116, 50, 0};
.global .align 4 .b8 _ZN21rti_internal_typename4modeE[4] = {105, 110, 116, 0};
.global .align 4 .u32 _ZN21rti_internal_typeenum7pixelIDE = 4919;
.global .align 4 .u32 _ZN21rti_internal_typeenum10resolutionE = 4919;
.global .align 4 .u32 _ZN21rti_internal_typeenum4modeE = 4919;
.global .align 16 .b8 _ZN21rti_internal_semantic7pixelIDE[14] = {114, 116, 76, 97, 117, 110, 99, 104, 73, 110, 100, 101, 120, 0};
.global .align 16 .b8 _ZN21rti_internal_semantic10resolutionE[12] = {114, 116, 76, 97, 117, 110, 99, 104, 68, 105, 109, 0};
.global .align 1 .b8 _ZN21rti_internal_semantic4modeE[1];
.global .align 1 .b8 _ZN23rti_internal_annotation7pixelIDE[1];
.global .align 1 .b8 _ZN23rti_internal_annotation10resolutionE[1];
.global .align 1 .b8 _ZN23rti_internal_annotation4modeE[1];
.visible .entry _Z6oxMainv(
)
{
.reg .pred %p<106>;
.reg .b16 %rs<16>;
.reg .f32 %f<689>;
.reg .b32 %r<106>;
.reg .b64 %rd<54>;
ld.global.v2.u32 {%r3, %r4}, [pixelID];
cvt.u64.u32 %rd3, %r3;
cvt.u64.u32 %rd4, %r4;
mov.u64 %rd7, output_buffer;
cvta.global.u64 %rd2, %rd7;
mov.u32 %r1, 2;
mov.u32 %r2, 16;
mov.u64 %rd6, 0;
// inline asm
call (%rd1), _rt_buffer_get_64, (%rd2, %r1, %r2, %rd3, %rd4, %rd6, %rd6);
// inline asm
ld.v4.f32 {%f90, %f91, %f92, %f93}, [%rd1];
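// The block below appears to expand powf(%f90, 2.2f) for the first channel:
// frexp-style range reduction, an atanh-series log, a scale by 2.2 (0f400CCCCD),
// then exp2, with special cases for 0, 1, infinities and NaN. The same pattern
// repeats for %f91 and %f92 further down.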
abs.f32 %f5, %f90;
setp.lt.f32 %p7, %f5, 0f00800000;
mul.f32 %f97, %f5, 0f4B800000;
selp.f32 %f98, 0fC3170000, 0fC2FE0000, %p7;
selp.f32 %f99, %f97, %f5, %p7;
mov.b32 %r7, %f99;
and.b32 %r8, %r7, 8388607;
or.b32 %r9, %r8, 1065353216;
mov.b32 %f100, %r9;
shr.u32 %r10, %r7, 23;
cvt.rn.f32.u32 %f101, %r10;
add.f32 %f102, %f98, %f101;
setp.gt.f32 %p8, %f100, 0f3FB504F3;
mul.f32 %f103, %f100, 0f3F000000;
add.f32 %f104, %f102, 0f3F800000;
selp.f32 %f105, %f103, %f100, %p8;
selp.f32 %f106, %f104, %f102, %p8;
add.f32 %f107, %f105, 0fBF800000;
add.f32 %f89, %f105, 0f3F800000;
// inline asm
rcp.approx.ftz.f32 %f88,%f89;
// inline asm
add.f32 %f108, %f107, %f107;
mul.f32 %f109, %f88, %f108;
mul.f32 %f110, %f109, %f109;
mov.f32 %f111, 0f3C4CAF63;
mov.f32 %f112, 0f3B18F0FE;
fma.rn.f32 %f113, %f112, %f110, %f111;
mov.f32 %f114, 0f3DAAAABD;
fma.rn.f32 %f115, %f113, %f110, %f114;
mul.rn.f32 %f116, %f115, %f110;
mul.rn.f32 %f117, %f116, %f109;
sub.f32 %f118, %f107, %f109;
neg.f32 %f119, %f109;
add.f32 %f120, %f118, %f118;
fma.rn.f32 %f121, %f119, %f107, %f120;
mul.rn.f32 %f122, %f88, %f121;
add.f32 %f123, %f117, %f109;
sub.f32 %f124, %f109, %f123;
add.f32 %f125, %f117, %f124;
add.f32 %f126, %f122, %f125;
add.f32 %f127, %f123, %f126;
sub.f32 %f128, %f123, %f127;
add.f32 %f129, %f126, %f128;
mov.f32 %f130, 0f3F317200;
mul.rn.f32 %f131, %f106, %f130;
mov.f32 %f132, 0f35BFBE8E;
mul.rn.f32 %f133, %f106, %f132;
add.f32 %f134, %f131, %f127;
sub.f32 %f135, %f131, %f134;
add.f32 %f136, %f127, %f135;
add.f32 %f137, %f129, %f136;
add.f32 %f138, %f133, %f137;
add.f32 %f139, %f134, %f138;
sub.f32 %f140, %f134, %f139;
add.f32 %f141, %f138, %f140;
mov.f32 %f142, 0f400CCCCD;
mul.rn.f32 %f143, %f142, %f139;
neg.f32 %f144, %f143;
fma.rn.f32 %f145, %f142, %f139, %f144;
fma.rn.f32 %f146, %f142, %f141, %f145;
mov.f32 %f147, 0f00000000;
fma.rn.f32 %f148, %f147, %f139, %f146;
add.rn.f32 %f149, %f143, %f148;
neg.f32 %f150, %f149;
add.rn.f32 %f151, %f143, %f150;
add.rn.f32 %f152, %f151, %f148;
mov.b32 %r11, %f149;
setp.eq.s32 %p9, %r11, 1118925336;
add.s32 %r12, %r11, -1;
mov.b32 %f153, %r12;
add.f32 %f154, %f152, 0f37000000;
selp.f32 %f155, %f153, %f149, %p9;
selp.f32 %f6, %f154, %f152, %p9;
mul.f32 %f156, %f155, 0f3FB8AA3B;
cvt.rzi.f32.f32 %f157, %f156;
mov.f32 %f158, 0fBF317200;
fma.rn.f32 %f159, %f157, %f158, %f155;
mov.f32 %f160, 0fB5BFBE8E;
fma.rn.f32 %f161, %f157, %f160, %f159;
mul.f32 %f162, %f161, 0f3FB8AA3B;
ex2.approx.ftz.f32 %f163, %f162;
add.f32 %f164, %f157, 0f00000000;
ex2.approx.f32 %f165, %f164;
mul.f32 %f166, %f163, %f165;
setp.lt.f32 %p10, %f155, 0fC2D20000;
selp.f32 %f167, 0f00000000, %f166, %p10;
setp.gt.f32 %p11, %f155, 0f42D20000;
selp.f32 %f671, 0f7F800000, %f167, %p11;
setp.eq.f32 %p12, %f671, 0f7F800000;
@%p12 bra BB0_2;
fma.rn.f32 %f671, %f671, %f6, %f671;
BB0_2:
mov.f32 %f668, 0f3F8CCCCD;
cvt.rzi.f32.f32 %f667, %f668;
fma.rn.f32 %f666, %f667, 0fC0000000, 0f400CCCCD;
abs.f32 %f665, %f666;
setp.lt.f32 %p13, %f90, 0f00000000;
setp.eq.f32 %p14, %f665, 0f3F800000;
and.pred %p1, %p13, %p14;
mov.b32 %r13, %f671;
xor.b32 %r14, %r13, -2147483648;
mov.b32 %f168, %r14;
selp.f32 %f673, %f168, %f671, %p1;
setp.eq.f32 %p15, %f90, 0f00000000;
@%p15 bra BB0_5;
bra.uni BB0_3;
BB0_5:
add.f32 %f171, %f90, %f90;
selp.f32 %f673, %f171, 0f00000000, %p14;
bra.uni BB0_6;
BB0_3:
setp.geu.f32 %p16, %f90, 0f00000000;
@%p16 bra BB0_6;
mov.f32 %f670, 0f400CCCCD;
cvt.rzi.f32.f32 %f170, %f670;
setp.neu.f32 %p17, %f170, 0f400CCCCD;
selp.f32 %f673, 0f7FFFFFFF, %f673, %p17;
BB0_6:
abs.f32 %f607, %f90;
add.f32 %f172, %f607, 0f400CCCCD;
mov.b32 %r15, %f172;
setp.lt.s32 %p19, %r15, 2139095040;
@%p19 bra BB0_11;
abs.f32 %f648, %f90;
setp.gtu.f32 %p20, %f648, 0f7F800000;
@%p20 bra BB0_10;
bra.uni BB0_8;
BB0_10:
add.f32 %f673, %f90, 0f400CCCCD;
bra.uni BB0_11;
BB0_8:
abs.f32 %f649, %f90;
setp.neu.f32 %p21, %f649, 0f7F800000;
@%p21 bra BB0_11;
selp.f32 %f673, 0fFF800000, 0f7F800000, %p1;
BB0_11:
mov.f32 %f669, 0f400CCCCD;
mov.f32 %f615, 0fB5BFBE8E;
mov.f32 %f614, 0fBF317200;
mov.f32 %f613, 0f00000000;
mov.f32 %f612, 0f35BFBE8E;
mov.f32 %f611, 0f3F317200;
mov.f32 %f610, 0f3DAAAABD;
mov.f32 %f609, 0f3C4CAF63;
mov.f32 %f608, 0f3B18F0FE;
setp.eq.f32 %p22, %f90, 0f3F800000;
selp.f32 %f175, 0f3F800000, %f673, %p22;
cvt.sat.f32.f32 %f17, %f175;
abs.f32 %f18, %f91;
setp.lt.f32 %p23, %f18, 0f00800000;
mul.f32 %f176, %f18, 0f4B800000;
selp.f32 %f177, 0fC3170000, 0fC2FE0000, %p23;
selp.f32 %f178, %f176, %f18, %p23;
mov.b32 %r16, %f178;
and.b32 %r17, %r16, 8388607;
or.b32 %r18, %r17, 1065353216;
mov.b32 %f179, %r18;
shr.u32 %r19, %r16, 23;
cvt.rn.f32.u32 %f180, %r19;
add.f32 %f181, %f177, %f180;
setp.gt.f32 %p24, %f179, 0f3FB504F3;
mul.f32 %f182, %f179, 0f3F000000;
add.f32 %f183, %f181, 0f3F800000;
selp.f32 %f184, %f182, %f179, %p24;
selp.f32 %f185, %f183, %f181, %p24;
add.f32 %f186, %f184, 0fBF800000;
add.f32 %f174, %f184, 0f3F800000;
// inline asm
rcp.approx.ftz.f32 %f173,%f174;
// inline asm
add.f32 %f187, %f186, %f186;
mul.f32 %f188, %f173, %f187;
mul.f32 %f189, %f188, %f188;
fma.rn.f32 %f192, %f608, %f189, %f609;
fma.rn.f32 %f194, %f192, %f189, %f610;
mul.rn.f32 %f195, %f194, %f189;
mul.rn.f32 %f196, %f195, %f188;
sub.f32 %f197, %f186, %f188;
neg.f32 %f198, %f188;
add.f32 %f199, %f197, %f197;
fma.rn.f32 %f200, %f198, %f186, %f199;
mul.rn.f32 %f201, %f173, %f200;
add.f32 %f202, %f196, %f188;
sub.f32 %f203, %f188, %f202;
add.f32 %f204, %f196, %f203;
add.f32 %f205, %f201, %f204;
add.f32 %f206, %f202, %f205;
sub.f32 %f207, %f202, %f206;
add.f32 %f208, %f205, %f207;
mul.rn.f32 %f210, %f185, %f611;
mul.rn.f32 %f212, %f185, %f612;
add.f32 %f213, %f210, %f206;
sub.f32 %f214, %f210, %f213;
add.f32 %f215, %f206, %f214;
add.f32 %f216, %f208, %f215;
add.f32 %f217, %f212, %f216;
add.f32 %f218, %f213, %f217;
sub.f32 %f219, %f213, %f218;
add.f32 %f220, %f217, %f219;
mul.rn.f32 %f222, %f669, %f218;
neg.f32 %f223, %f222;
fma.rn.f32 %f224, %f669, %f218, %f223;
fma.rn.f32 %f225, %f669, %f220, %f224;
fma.rn.f32 %f227, %f613, %f218, %f225;
add.rn.f32 %f228, %f222, %f227;
neg.f32 %f229, %f228;
add.rn.f32 %f230, %f222, %f229;
add.rn.f32 %f231, %f230, %f227;
mov.b32 %r20, %f228;
setp.eq.s32 %p25, %r20, 1118925336;
add.s32 %r21, %r20, -1;
mov.b32 %f232, %r21;
add.f32 %f233, %f231, 0f37000000;
selp.f32 %f234, %f232, %f228, %p25;
selp.f32 %f19, %f233, %f231, %p25;
mul.f32 %f235, %f234, 0f3FB8AA3B;
cvt.rzi.f32.f32 %f236, %f235;
fma.rn.f32 %f238, %f236, %f614, %f234;
fma.rn.f32 %f240, %f236, %f615, %f238;
mul.f32 %f241, %f240, 0f3FB8AA3B;
ex2.approx.ftz.f32 %f242, %f241;
add.f32 %f243, %f236, 0f00000000;
ex2.approx.f32 %f244, %f243;
mul.f32 %f245, %f242, %f244;
setp.lt.f32 %p26, %f234, 0fC2D20000;
selp.f32 %f246, 0f00000000, %f245, %p26;
setp.gt.f32 %p27, %f234, 0f42D20000;
selp.f32 %f674, 0f7F800000, %f246, %p27;
setp.eq.f32 %p28, %f674, 0f7F800000;
@%p28 bra BB0_13;
fma.rn.f32 %f674, %f674, %f19, %f674;
BB0_13:
setp.lt.f32 %p29, %f91, 0f00000000;
and.pred %p2, %p29, %p14;
mov.b32 %r22, %f674;
xor.b32 %r23, %r22, -2147483648;
mov.b32 %f247, %r23;
selp.f32 %f676, %f247, %f674, %p2;
setp.eq.f32 %p31, %f91, 0f00000000;
@%p31 bra BB0_16;
bra.uni BB0_14;
BB0_16:
add.f32 %f250, %f91, %f91;
selp.f32 %f676, %f250, 0f00000000, %p14;
bra.uni BB0_17;
BB0_14:
setp.geu.f32 %p32, %f91, 0f00000000;
@%p32 bra BB0_17;
mov.f32 %f661, 0f400CCCCD;
cvt.rzi.f32.f32 %f249, %f661;
setp.neu.f32 %p33, %f249, 0f400CCCCD;
selp.f32 %f676, 0f7FFFFFFF, %f676, %p33;
BB0_17:
abs.f32 %f650, %f91;
add.f32 %f251, %f650, 0f400CCCCD;
mov.b32 %r24, %f251;
setp.lt.s32 %p35, %r24, 2139095040;
@%p35 bra BB0_22;
abs.f32 %f659, %f91;
setp.gtu.f32 %p36, %f659, 0f7F800000;
@%p36 bra BB0_21;
bra.uni BB0_19;
BB0_21:
add.f32 %f676, %f91, 0f400CCCCD;
bra.uni BB0_22;
BB0_19:
abs.f32 %f660, %f91;
setp.neu.f32 %p37, %f660, 0f7F800000;
@%p37 bra BB0_22;
selp.f32 %f676, 0fFF800000, 0f7F800000, %p2;
BB0_22:
mov.f32 %f651, 0f400CCCCD;
mov.f32 %f623, 0fB5BFBE8E;
mov.f32 %f622, 0fBF317200;
mov.f32 %f621, 0f00000000;
mov.f32 %f620, 0f35BFBE8E;
mov.f32 %f619, 0f3F317200;
mov.f32 %f618, 0f3DAAAABD;
mov.f32 %f617, 0f3C4CAF63;
mov.f32 %f616, 0f3B18F0FE;
setp.eq.f32 %p38, %f91, 0f3F800000;
selp.f32 %f254, 0f3F800000, %f676, %p38;
cvt.sat.f32.f32 %f30, %f254;
abs.f32 %f31, %f92;
setp.lt.f32 %p39, %f31, 0f00800000;
mul.f32 %f255, %f31, 0f4B800000;
selp.f32 %f256, 0fC3170000, 0fC2FE0000, %p39;
selp.f32 %f257, %f255, %f31, %p39;
mov.b32 %r25, %f257;
and.b32 %r26, %r25, 8388607;
or.b32 %r27, %r26, 1065353216;
mov.b32 %f258, %r27;
shr.u32 %r28, %r25, 23;
cvt.rn.f32.u32 %f259, %r28;
add.f32 %f260, %f256, %f259;
setp.gt.f32 %p40, %f258, 0f3FB504F3;
mul.f32 %f261, %f258, 0f3F000000;
add.f32 %f262, %f260, 0f3F800000;
selp.f32 %f263, %f261, %f258, %p40;
selp.f32 %f264, %f262, %f260, %p40;
add.f32 %f265, %f263, 0fBF800000;
add.f32 %f253, %f263, 0f3F800000;
// inline asm
rcp.approx.ftz.f32 %f252,%f253;
// inline asm
add.f32 %f266, %f265, %f265;
mul.f32 %f267, %f252, %f266;
mul.f32 %f268, %f267, %f267;
fma.rn.f32 %f271, %f616, %f268, %f617;
fma.rn.f32 %f273, %f271, %f268, %f618;
mul.rn.f32 %f274, %f273, %f268;
mul.rn.f32 %f275, %f274, %f267;
sub.f32 %f276, %f265, %f267;
neg.f32 %f277, %f267;
add.f32 %f278, %f276, %f276;
fma.rn.f32 %f279, %f277, %f265, %f278;
mul.rn.f32 %f280, %f252, %f279;
add.f32 %f281, %f275, %f267;
sub.f32 %f282, %f267, %f281;
add.f32 %f283, %f275, %f282;
add.f32 %f284, %f280, %f283;
add.f32 %f285, %f281, %f284;
sub.f32 %f286, %f281, %f285;
add.f32 %f287, %f284, %f286;
mul.rn.f32 %f289, %f264, %f619;
mul.rn.f32 %f291, %f264, %f620;
add.f32 %f292, %f289, %f285;
sub.f32 %f293, %f289, %f292;
add.f32 %f294, %f285, %f293;
add.f32 %f295, %f287, %f294;
add.f32 %f296, %f291, %f295;
add.f32 %f297, %f292, %f296;
sub.f32 %f298, %f292, %f297;
add.f32 %f299, %f296, %f298;
mul.rn.f32 %f301, %f651, %f297;
neg.f32 %f302, %f301;
fma.rn.f32 %f303, %f651, %f297, %f302;
fma.rn.f32 %f304, %f651, %f299, %f303;
fma.rn.f32 %f306, %f621, %f297, %f304;
add.rn.f32 %f307, %f301, %f306;
neg.f32 %f308, %f307;
add.rn.f32 %f309, %f301, %f308;
add.rn.f32 %f310, %f309, %f306;
mov.b32 %r29, %f307;
setp.eq.s32 %p41, %r29, 1118925336;
add.s32 %r30, %r29, -1;
mov.b32 %f311, %r30;
add.f32 %f312, %f310, 0f37000000;
selp.f32 %f313, %f311, %f307, %p41;
selp.f32 %f32, %f312, %f310, %p41;
mul.f32 %f314, %f313, 0f3FB8AA3B;
cvt.rzi.f32.f32 %f315, %f314;
fma.rn.f32 %f317, %f315, %f622, %f313;
fma.rn.f32 %f319, %f315, %f623, %f317;
mul.f32 %f320, %f319, 0f3FB8AA3B;
ex2.approx.ftz.f32 %f321, %f320;
add.f32 %f322, %f315, 0f00000000;
ex2.approx.f32 %f323, %f322;
mul.f32 %f324, %f321, %f323;
setp.lt.f32 %p42, %f313, 0fC2D20000;
selp.f32 %f325, 0f00000000, %f324, %p42;
setp.gt.f32 %p43, %f313, 0f42D20000;
selp.f32 %f677, 0f7F800000, %f325, %p43;
setp.eq.f32 %p44, %f677, 0f7F800000;
@%p44 bra BB0_24;
fma.rn.f32 %f677, %f677, %f32, %f677;
BB0_24:
setp.lt.f32 %p45, %f92, 0f00000000;
and.pred %p3, %p45, %p14;
mov.b32 %r31, %f677;
xor.b32 %r32, %r31, -2147483648;
mov.b32 %f326, %r32;
selp.f32 %f679, %f326, %f677, %p3;
setp.eq.f32 %p47, %f92, 0f00000000;
@%p47 bra BB0_27;
bra.uni BB0_25;
BB0_27:
add.f32 %f329, %f92, %f92;
selp.f32 %f679, %f329, 0f00000000, %p14;
bra.uni BB0_28;
BB0_25:
setp.geu.f32 %p48, %f92, 0f00000000;
@%p48 bra BB0_28;
mov.f32 %f658, 0f400CCCCD;
cvt.rzi.f32.f32 %f328, %f658;
setp.neu.f32 %p49, %f328, 0f400CCCCD;
selp.f32 %f679, 0f7FFFFFFF, %f679, %p49;
BB0_28:
add.f32 %f330, %f31, 0f400CCCCD;
mov.b32 %r33, %f330;
setp.lt.s32 %p51, %r33, 2139095040;
@%p51 bra BB0_33;
setp.gtu.f32 %p52, %f31, 0f7F800000;
@%p52 bra BB0_32;
bra.uni BB0_30;
BB0_32:
add.f32 %f679, %f92, 0f400CCCCD;
bra.uni BB0_33;
BB0_30:
setp.neu.f32 %p53, %f31, 0f7F800000;
@%p53 bra BB0_33;
selp.f32 %f679, 0fFF800000, 0f7F800000, %p3;
BB0_33:
setp.eq.f32 %p54, %f92, 0f3F800000;
selp.f32 %f331, 0f3F800000, %f679, %p54;
cvt.sat.f32.f32 %f43, %f331;
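// Dispatch on 'mode': 0 -> BB0_70 (half4 write to image2), 1 -> BB0_71
// (RGBA8 write to image3), 2 -> BB0_35 (combine with existing image2 contents);
// any other value falls through to the return.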
ld.global.u32 %r34, [mode];
setp.eq.s32 %p55, %r34, 0;
@%p55 bra BB0_70;
setp.eq.s32 %p56, %r34, 1;
@%p56 bra BB0_71;
bra.uni BB0_35;
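// BB0_71 (mode == 1): clamp each gamma-corrected channel to [0,1], scale by 255
// (0f437F0000), truncate to bytes and store RGBA8 to image3 with alpha = 255.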
BB0_71:
mov.u64 %rd50, 0;
mov.u32 %r103, 2;
ld.global.v2.u32 {%r94, %r95}, [pixelID];
cvt.u64.u32 %rd43, %r94;
cvt.u64.u32 %rd44, %r95;
mov.u64 %rd47, image3;
cvta.global.u64 %rd42, %rd47;
mov.u32 %r93, 4;
// inline asm
call (%rd41), _rt_buffer_get_64, (%rd42, %r103, %r93, %rd43, %rd44, %rd50, %rd50);
// inline asm
cvt.sat.f32.f32 %f601, %f17;
mul.f32 %f602, %f601, 0f437F0000;
cvt.rzi.u32.f32 %r98, %f602;
cvt.sat.f32.f32 %f603, %f30;
mul.f32 %f604, %f603, 0f437F0000;
cvt.rzi.u32.f32 %r99, %f604;
cvt.sat.f32.f32 %f605, %f43;
mul.f32 %f606, %f605, 0f437F0000;
cvt.rzi.u32.f32 %r100, %f606;
cvt.u16.u32 %rs12, %r100;
cvt.u16.u32 %rs13, %r99;
cvt.u16.u32 %rs14, %r98;
mov.u16 %rs15, 255;
st.v4.u8 [%rd41], {%rs14, %rs13, %rs12, %rs15};
bra.uni BB0_72;
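// BB0_70 (mode == 0): appears to pack the three channels as halfs into image2,
// normalising by 1/(1 - max(r,g,b)) and scaling by 0.25 (0f3E800000), alpha = 1.0.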
BB0_70:
mov.u64 %rd51, 0;
mov.u32 %r104, 2;
max.f32 %f594, %f17, %f30;
max.f32 %f595, %f594, %f43;
mov.f32 %f593, 0f3F800000;
sub.f32 %f596, %f593, %f595;
rcp.rn.f32 %f597, %f596;
mul.f32 %f598, %f17, %f597;
mul.f32 %f599, %f30, %f597;
mul.f32 %f600, %f43, %f597;
mul.f32 %f590, %f598, 0f3E800000;
mul.f32 %f591, %f599, 0f3E800000;
mul.f32 %f592, %f600, 0f3E800000;
ld.global.v2.u32 {%r88, %r89}, [pixelID];
cvt.u64.u32 %rd36, %r88;
cvt.u64.u32 %rd37, %r89;
mov.u64 %rd40, image2;
cvta.global.u64 %rd35, %rd40;
mov.u32 %r87, 8;
// inline asm
call (%rd34), _rt_buffer_get_64, (%rd35, %r104, %r87, %rd36, %rd37, %rd51, %rd51);
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs10, %f592;}
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs9, %f591;}
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs8, %f590;}
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs11, %f593;}
// inline asm
st.v4.u16 [%rd34], {%rs8, %rs9, %rs10, %rs11};
bra.uni BB0_72;
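// BB0_35 (mode == 2): if mode != 2, skip to the return. Otherwise read the existing
// three half values for this pixel from image2, compute powf(1 - x, 2.2f) for each
// saturated channel, apply the same 1/(1 - max) and 0.25 scaling as mode 0, take the
// component-wise min against the stored values and write the half4 back (alpha = 1.0).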
BB0_35:
setp.ne.s32 %p57, %r34, 2;
@%p57 bra BB0_72;
mov.f32 %f652, 0f400CCCCD;
mov.u64 %rd48, 0;
mov.u32 %r101, 2;
mov.f32 %f631, 0fB5BFBE8E;
mov.f32 %f630, 0fBF317200;
mov.f32 %f629, 0f00000000;
mov.f32 %f628, 0f35BFBE8E;
mov.f32 %f627, 0f3F317200;
mov.f32 %f626, 0f3DAAAABD;
mov.f32 %f625, 0f3C4CAF63;
mov.f32 %f624, 0f3B18F0FE;
ld.global.v2.u32 {%r41, %r42}, [pixelID];
cvt.u64.u32 %rd10, %r41;
cvt.u64.u32 %rd11, %r42;
mov.u64 %rd26, image2;
cvta.global.u64 %rd9, %rd26;
mov.u32 %r40, 8;
// inline asm
call (%rd8), _rt_buffer_get_64, (%rd9, %r101, %r40, %rd10, %rd11, %rd48, %rd48);
// inline asm
ld.u16 %rs1, [%rd8];
// inline asm
{ cvt.f32.f16 %f332, %rs1;}
// inline asm
ld.global.v2.u32 {%r45, %r46}, [pixelID];
cvt.u64.u32 %rd16, %r45;
cvt.u64.u32 %rd17, %r46;
// inline asm
call (%rd14), _rt_buffer_get_64, (%rd9, %r101, %r40, %rd16, %rd17, %rd48, %rd48);
// inline asm
ld.u16 %rs2, [%rd14+2];
// inline asm
{ cvt.f32.f16 %f333, %rs2;}
// inline asm
ld.global.v2.u32 {%r49, %r50}, [pixelID];
cvt.u64.u32 %rd22, %r49;
cvt.u64.u32 %rd23, %r50;
// inline asm
call (%rd20), _rt_buffer_get_64, (%rd9, %r101, %r40, %rd22, %rd23, %rd48, %rd48);
// inline asm
ld.u16 %rs3, [%rd20+4];
// inline asm
{ cvt.f32.f16 %f334, %rs3;}
// inline asm
mov.f32 %f337, 0f3F800000;
sub.f32 %f47, %f337, %f17;
abs.f32 %f48, %f47;
setp.lt.f32 %p58, %f48, 0f00800000;
mul.f32 %f338, %f48, 0f4B800000;
selp.f32 %f339, 0fC3170000, 0fC2FE0000, %p58;
selp.f32 %f340, %f338, %f48, %p58;
mov.b32 %r53, %f340;
and.b32 %r54, %r53, 8388607;
or.b32 %r55, %r54, 1065353216;
mov.b32 %f341, %r55;
shr.u32 %r56, %r53, 23;
cvt.rn.f32.u32 %f342, %r56;
add.f32 %f343, %f339, %f342;
setp.gt.f32 %p59, %f341, 0f3FB504F3;
mul.f32 %f344, %f341, 0f3F000000;
add.f32 %f345, %f343, 0f3F800000;
selp.f32 %f346, %f344, %f341, %p59;
selp.f32 %f347, %f345, %f343, %p59;
add.f32 %f348, %f346, 0fBF800000;
add.f32 %f336, %f346, 0f3F800000;
// inline asm
rcp.approx.ftz.f32 %f335,%f336;
// inline asm
add.f32 %f349, %f348, %f348;
mul.f32 %f350, %f335, %f349;
mul.f32 %f351, %f350, %f350;
fma.rn.f32 %f354, %f624, %f351, %f625;
fma.rn.f32 %f356, %f354, %f351, %f626;
mul.rn.f32 %f357, %f356, %f351;
mul.rn.f32 %f358, %f357, %f350;
sub.f32 %f359, %f348, %f350;
neg.f32 %f360, %f350;
add.f32 %f361, %f359, %f359;
fma.rn.f32 %f362, %f360, %f348, %f361;
mul.rn.f32 %f363, %f335, %f362;
add.f32 %f364, %f358, %f350;
sub.f32 %f365, %f350, %f364;
add.f32 %f366, %f358, %f365;
add.f32 %f367, %f363, %f366;
add.f32 %f368, %f364, %f367;
sub.f32 %f369, %f364, %f368;
add.f32 %f370, %f367, %f369;
mul.rn.f32 %f372, %f347, %f627;
mul.rn.f32 %f374, %f347, %f628;
add.f32 %f375, %f372, %f368;
sub.f32 %f376, %f372, %f375;
add.f32 %f377, %f368, %f376;
add.f32 %f378, %f370, %f377;
add.f32 %f379, %f374, %f378;
add.f32 %f380, %f375, %f379;
sub.f32 %f381, %f375, %f380;
add.f32 %f382, %f379, %f381;
mul.rn.f32 %f384, %f652, %f380;
neg.f32 %f385, %f384;
fma.rn.f32 %f386, %f652, %f380, %f385;
fma.rn.f32 %f387, %f652, %f382, %f386;
fma.rn.f32 %f389, %f629, %f380, %f387;
add.rn.f32 %f390, %f384, %f389;
neg.f32 %f391, %f390;
add.rn.f32 %f392, %f384, %f391;
add.rn.f32 %f393, %f392, %f389;
mov.b32 %r57, %f390;
setp.eq.s32 %p60, %r57, 1118925336;
add.s32 %r58, %r57, -1;
mov.b32 %f394, %r58;
add.f32 %f395, %f393, 0f37000000;
selp.f32 %f396, %f394, %f390, %p60;
selp.f32 %f49, %f395, %f393, %p60;
mul.f32 %f397, %f396, 0f3FB8AA3B;
cvt.rzi.f32.f32 %f398, %f397;
fma.rn.f32 %f400, %f398, %f630, %f396;
fma.rn.f32 %f402, %f398, %f631, %f400;
mul.f32 %f403, %f402, 0f3FB8AA3B;
ex2.approx.ftz.f32 %f404, %f403;
add.f32 %f405, %f398, 0f00000000;
ex2.approx.f32 %f406, %f405;
mul.f32 %f407, %f404, %f406;
setp.lt.f32 %p61, %f396, 0fC2D20000;
selp.f32 %f408, 0f00000000, %f407, %p61;
setp.gt.f32 %p62, %f396, 0f42D20000;
selp.f32 %f680, 0f7F800000, %f408, %p62;
setp.eq.f32 %p63, %f680, 0f7F800000;
@%p63 bra BB0_38;
fma.rn.f32 %f680, %f680, %f49, %f680;
BB0_38:
setp.lt.f32 %p64, %f47, 0f00000000;
and.pred %p4, %p64, %p14;
mov.b32 %r59, %f680;
xor.b32 %r60, %r59, -2147483648;
mov.b32 %f409, %r60;
selp.f32 %f682, %f409, %f680, %p4;
setp.eq.f32 %p66, %f47, 0f00000000;
@%p66 bra BB0_41;
bra.uni BB0_39;
BB0_41:
add.f32 %f412, %f47, %f47;
selp.f32 %f682, %f412, 0f00000000, %p14;
bra.uni BB0_42;
BB0_39:
setp.geu.f32 %p67, %f47, 0f00000000;
@%p67 bra BB0_42;
mov.f32 %f657, 0f400CCCCD;
cvt.rzi.f32.f32 %f411, %f657;
setp.neu.f32 %p68, %f411, 0f400CCCCD;
selp.f32 %f682, 0f7FFFFFFF, %f682, %p68;
BB0_42:
add.f32 %f413, %f48, 0f400CCCCD;
mov.b32 %r61, %f413;
setp.lt.s32 %p70, %r61, 2139095040;
@%p70 bra BB0_47;
setp.gtu.f32 %p71, %f48, 0f7F800000;
@%p71 bra BB0_46;
bra.uni BB0_44;
BB0_46:
add.f32 %f682, %f47, 0f400CCCCD;
bra.uni BB0_47;
BB0_44:
setp.neu.f32 %p72, %f48, 0f7F800000;
@%p72 bra BB0_47;
selp.f32 %f682, 0fFF800000, 0f7F800000, %p4;
BB0_47:
mov.f32 %f662, 0f3F800000;
mov.f32 %f653, 0f400CCCCD;
mov.f32 %f639, 0fB5BFBE8E;
mov.f32 %f638, 0fBF317200;
mov.f32 %f637, 0f00000000;
mov.f32 %f636, 0f35BFBE8E;
mov.f32 %f635, 0f3F317200;
mov.f32 %f634, 0f3DAAAABD;
mov.f32 %f633, 0f3C4CAF63;
mov.f32 %f632, 0f3B18F0FE;
setp.eq.f32 %p73, %f47, 0f3F800000;
selp.f32 %f416, 0f3F800000, %f682, %p73;
cvt.sat.f32.f32 %f60, %f416;
sub.f32 %f61, %f662, %f30;
abs.f32 %f62, %f61;
setp.lt.f32 %p74, %f62, 0f00800000;
mul.f32 %f418, %f62, 0f4B800000;
selp.f32 %f419, 0fC3170000, 0fC2FE0000, %p74;
selp.f32 %f420, %f418, %f62, %p74;
mov.b32 %r62, %f420;
and.b32 %r63, %r62, 8388607;
or.b32 %r64, %r63, 1065353216;
mov.b32 %f421, %r64;
shr.u32 %r65, %r62, 23;
cvt.rn.f32.u32 %f422, %r65;
add.f32 %f423, %f419, %f422;
setp.gt.f32 %p75, %f421, 0f3FB504F3;
mul.f32 %f424, %f421, 0f3F000000;
add.f32 %f425, %f423, 0f3F800000;
selp.f32 %f426, %f424, %f421, %p75;
selp.f32 %f427, %f425, %f423, %p75;
add.f32 %f428, %f426, 0fBF800000;
add.f32 %f415, %f426, 0f3F800000;
// inline asm
rcp.approx.ftz.f32 %f414,%f415;
// inline asm
add.f32 %f429, %f428, %f428;
mul.f32 %f430, %f414, %f429;
mul.f32 %f431, %f430, %f430;
fma.rn.f32 %f434, %f632, %f431, %f633;
fma.rn.f32 %f436, %f434, %f431, %f634;
mul.rn.f32 %f437, %f436, %f431;
mul.rn.f32 %f438, %f437, %f430;
sub.f32 %f439, %f428, %f430;
neg.f32 %f440, %f430;
add.f32 %f441, %f439, %f439;
fma.rn.f32 %f442, %f440, %f428, %f441;
mul.rn.f32 %f443, %f414, %f442;
add.f32 %f444, %f438, %f430;
sub.f32 %f445, %f430, %f444;
add.f32 %f446, %f438, %f445;
add.f32 %f447, %f443, %f446;
add.f32 %f448, %f444, %f447;
sub.f32 %f449, %f444, %f448;
add.f32 %f450, %f447, %f449;
mul.rn.f32 %f452, %f427, %f635;
mul.rn.f32 %f454, %f427, %f636;
add.f32 %f455, %f452, %f448;
sub.f32 %f456, %f452, %f455;
add.f32 %f457, %f448, %f456;
add.f32 %f458, %f450, %f457;
add.f32 %f459, %f454, %f458;
add.f32 %f460, %f455, %f459;
sub.f32 %f461, %f455, %f460;
add.f32 %f462, %f459, %f461;
mul.rn.f32 %f464, %f653, %f460;
neg.f32 %f465, %f464;
fma.rn.f32 %f466, %f653, %f460, %f465;
fma.rn.f32 %f467, %f653, %f462, %f466;
fma.rn.f32 %f469, %f637, %f460, %f467;
add.rn.f32 %f470, %f464, %f469;
neg.f32 %f471, %f470;
add.rn.f32 %f472, %f464, %f471;
add.rn.f32 %f473, %f472, %f469;
mov.b32 %r66, %f470;
setp.eq.s32 %p76, %r66, 1118925336;
add.s32 %r67, %r66, -1;
mov.b32 %f474, %r67;
add.f32 %f475, %f473, 0f37000000;
selp.f32 %f476, %f474, %f470, %p76;
selp.f32 %f63, %f475, %f473, %p76;
mul.f32 %f477, %f476, 0f3FB8AA3B;
cvt.rzi.f32.f32 %f478, %f477;
fma.rn.f32 %f480, %f478, %f638, %f476;
fma.rn.f32 %f482, %f478, %f639, %f480;
mul.f32 %f483, %f482, 0f3FB8AA3B;
ex2.approx.ftz.f32 %f484, %f483;
add.f32 %f485, %f478, 0f00000000;
ex2.approx.f32 %f486, %f485;
mul.f32 %f487, %f484, %f486;
setp.lt.f32 %p77, %f476, 0fC2D20000;
selp.f32 %f488, 0f00000000, %f487, %p77;
setp.gt.f32 %p78, %f476, 0f42D20000;
selp.f32 %f683, 0f7F800000, %f488, %p78;
setp.eq.f32 %p79, %f683, 0f7F800000;
@%p79 bra BB0_49;
fma.rn.f32 %f683, %f683, %f63, %f683;
BB0_49:
setp.lt.f32 %p80, %f61, 0f00000000;
and.pred %p5, %p80, %p14;
mov.b32 %r68, %f683;
xor.b32 %r69, %r68, -2147483648;
mov.b32 %f489, %r69;
selp.f32 %f685, %f489, %f683, %p5;
setp.eq.f32 %p82, %f61, 0f00000000;
@%p82 bra BB0_52;
bra.uni BB0_50;
BB0_52:
add.f32 %f492, %f61, %f61;
selp.f32 %f685, %f492, 0f00000000, %p14;
bra.uni BB0_53;
BB0_50:
setp.geu.f32 %p83, %f61, 0f00000000;
@%p83 bra BB0_53;
mov.f32 %f656, 0f400CCCCD;
cvt.rzi.f32.f32 %f491, %f656;
setp.neu.f32 %p84, %f491, 0f400CCCCD;
selp.f32 %f685, 0f7FFFFFFF, %f685, %p84;
BB0_53:
add.f32 %f493, %f62, 0f400CCCCD;
mov.b32 %r70, %f493;
setp.lt.s32 %p86, %r70, 2139095040;
@%p86 bra BB0_58;
setp.gtu.f32 %p87, %f62, 0f7F800000;
@%p87 bra BB0_57;
bra.uni BB0_55;
BB0_57:
add.f32 %f685, %f61, 0f400CCCCD;
bra.uni BB0_58;
BB0_55:
setp.neu.f32 %p88, %f62, 0f7F800000;
@%p88 bra BB0_58;
selp.f32 %f685, 0fFF800000, 0f7F800000, %p5;
BB0_58:
mov.f32 %f663, 0f3F800000;
mov.f32 %f654, 0f400CCCCD;
mov.f32 %f647, 0fB5BFBE8E;
mov.f32 %f646, 0fBF317200;
mov.f32 %f645, 0f00000000;
mov.f32 %f644, 0f35BFBE8E;
mov.f32 %f643, 0f3F317200;
mov.f32 %f642, 0f3DAAAABD;
mov.f32 %f641, 0f3C4CAF63;
mov.f32 %f640, 0f3B18F0FE;
setp.eq.f32 %p89, %f61, 0f3F800000;
selp.f32 %f496, 0f3F800000, %f685, %p89;
cvt.sat.f32.f32 %f74, %f496;
sub.f32 %f75, %f663, %f43;
abs.f32 %f76, %f75;
setp.lt.f32 %p90, %f76, 0f00800000;
mul.f32 %f498, %f76, 0f4B800000;
selp.f32 %f499, 0fC3170000, 0fC2FE0000, %p90;
selp.f32 %f500, %f498, %f76, %p90;
mov.b32 %r71, %f500;
and.b32 %r72, %r71, 8388607;
or.b32 %r73, %r72, 1065353216;
mov.b32 %f501, %r73;
shr.u32 %r74, %r71, 23;
cvt.rn.f32.u32 %f502, %r74;
add.f32 %f503, %f499, %f502;
setp.gt.f32 %p91, %f501, 0f3FB504F3;
mul.f32 %f504, %f501, 0f3F000000;
add.f32 %f505, %f503, 0f3F800000;
selp.f32 %f506, %f504, %f501, %p91;
selp.f32 %f507, %f505, %f503, %p91;
add.f32 %f508, %f506, 0fBF800000;
add.f32 %f495, %f506, 0f3F800000;
// inline asm
rcp.approx.ftz.f32 %f494,%f495;
// inline asm
add.f32 %f509, %f508, %f508;
mul.f32 %f510, %f494, %f509;
mul.f32 %f511, %f510, %f510;
fma.rn.f32 %f514, %f640, %f511, %f641;
fma.rn.f32 %f516, %f514, %f511, %f642;
mul.rn.f32 %f517, %f516, %f511;
mul.rn.f32 %f518, %f517, %f510;
sub.f32 %f519, %f508, %f510;
neg.f32 %f520, %f510;
add.f32 %f521, %f519, %f519;
fma.rn.f32 %f522, %f520, %f508, %f521;
mul.rn.f32 %f523, %f494, %f522;
add.f32 %f524, %f518, %f510;
sub.f32 %f525, %f510, %f524;
add.f32 %f526, %f518, %f525;
add.f32 %f527, %f523, %f526;
add.f32 %f528, %f524, %f527;
sub.f32 %f529, %f524, %f528;
add.f32 %f530, %f527, %f529;
mul.rn.f32 %f532, %f507, %f643;
mul.rn.f32 %f534, %f507, %f644;
add.f32 %f535, %f532, %f528;
sub.f32 %f536, %f532, %f535;
add.f32 %f537, %f528, %f536;
add.f32 %f538, %f530, %f537;
add.f32 %f539, %f534, %f538;
add.f32 %f540, %f535, %f539;
sub.f32 %f541, %f535, %f540;
add.f32 %f542, %f539, %f541;
mul.rn.f32 %f544, %f654, %f540;
neg.f32 %f545, %f544;
fma.rn.f32 %f546, %f654, %f540, %f545;
fma.rn.f32 %f547, %f654, %f542, %f546;
fma.rn.f32 %f549, %f645, %f540, %f547;
add.rn.f32 %f550, %f544, %f549;
neg.f32 %f551, %f550;
add.rn.f32 %f552, %f544, %f551;
add.rn.f32 %f553, %f552, %f549;
mov.b32 %r75, %f550;
setp.eq.s32 %p92, %r75, 1118925336;
add.s32 %r76, %r75, -1;
mov.b32 %f554, %r76;
add.f32 %f555, %f553, 0f37000000;
selp.f32 %f556, %f554, %f550, %p92;
selp.f32 %f77, %f555, %f553, %p92;
mul.f32 %f557, %f556, 0f3FB8AA3B;
cvt.rzi.f32.f32 %f558, %f557;
fma.rn.f32 %f560, %f558, %f646, %f556;
fma.rn.f32 %f562, %f558, %f647, %f560;
mul.f32 %f563, %f562, 0f3FB8AA3B;
ex2.approx.ftz.f32 %f564, %f563;
add.f32 %f565, %f558, 0f00000000;
ex2.approx.f32 %f566, %f565;
mul.f32 %f567, %f564, %f566;
setp.lt.f32 %p93, %f556, 0fC2D20000;
selp.f32 %f568, 0f00000000, %f567, %p93;
setp.gt.f32 %p94, %f556, 0f42D20000;
selp.f32 %f686, 0f7F800000, %f568, %p94;
setp.eq.f32 %p95, %f686, 0f7F800000;
@%p95 bra BB0_60;
fma.rn.f32 %f686, %f686, %f77, %f686;
BB0_60:
setp.lt.f32 %p96, %f75, 0f00000000;
and.pred %p6, %p96, %p14;
mov.b32 %r77, %f686;
xor.b32 %r78, %r77, -2147483648;
mov.b32 %f569, %r78;
selp.f32 %f688, %f569, %f686, %p6;
setp.eq.f32 %p98, %f75, 0f00000000;
@%p98 bra BB0_63;
bra.uni BB0_61;
BB0_63:
add.f32 %f572, %f75, %f75;
selp.f32 %f688, %f572, 0f00000000, %p14;
bra.uni BB0_64;
BB0_61:
setp.geu.f32 %p99, %f75, 0f00000000;
@%p99 bra BB0_64;
mov.f32 %f655, 0f400CCCCD;
cvt.rzi.f32.f32 %f571, %f655;
setp.neu.f32 %p100, %f571, 0f400CCCCD;
selp.f32 %f688, 0f7FFFFFFF, %f688, %p100;
BB0_64:
add.f32 %f573, %f76, 0f400CCCCD;
mov.b32 %r79, %f573;
setp.lt.s32 %p102, %r79, 2139095040;
@%p102 bra BB0_69;
setp.gtu.f32 %p103, %f76, 0f7F800000;
@%p103 bra BB0_68;
bra.uni BB0_66;
BB0_68:
add.f32 %f688, %f75, 0f400CCCCD;
bra.uni BB0_69;
BB0_66:
setp.neu.f32 %p104, %f76, 0f7F800000;
@%p104 bra BB0_69;
selp.f32 %f688, 0fFF800000, 0f7F800000, %p6;
BB0_69:
mov.u32 %r105, 8;
mov.u64 %rd53, image2;
cvta.global.u64 %rd52, %rd53;
mov.f32 %f664, 0f3F800000;
mov.u64 %rd49, 0;
mov.u32 %r102, 2;
setp.eq.f32 %p105, %f75, 0f3F800000;
selp.f32 %f578, 0f3F800000, %f688, %p105;
cvt.sat.f32.f32 %f579, %f578;
max.f32 %f580, %f60, %f74;
max.f32 %f581, %f580, %f579;
sub.f32 %f582, %f664, %f581;
rcp.rn.f32 %f583, %f582;
mul.f32 %f584, %f60, %f583;
mul.f32 %f585, %f74, %f583;
mul.f32 %f586, %f579, %f583;
mul.f32 %f587, %f584, 0f3E800000;
mul.f32 %f588, %f585, 0f3E800000;
mul.f32 %f589, %f586, 0f3E800000;
min.f32 %f574, %f587, %f332;
min.f32 %f575, %f588, %f333;
min.f32 %f576, %f589, %f334;
ld.global.v2.u32 {%r82, %r83}, [pixelID];
cvt.u64.u32 %rd29, %r82;
cvt.u64.u32 %rd30, %r83;
// inline asm
call (%rd27), _rt_buffer_get_64, (%rd52, %r102, %r105, %rd29, %rd30, %rd49, %rd49);
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs6, %f576;}
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs5, %f575;}
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs4, %f574;}
// inline asm
// inline asm
{ cvt.rn.f16.f32 %rs7, %f664;}
// inline asm
st.v4.u16 [%rd27], {%rs4, %rs5, %rs6, %rs7};
BB0_72:
ret;
}