diff --git a/Ghidra/Processors/x86/data/languages/avx.sinc b/Ghidra/Processors/x86/data/languages/avx.sinc index 08badb2c7e..66e68cdcb2 100644 --- a/Ghidra/Processors/x86/data/languages/avx.sinc +++ b/Ghidra/Processors/x86/data/languages/avx.sinc @@ -2827,7 +2827,6 @@ define pcodeop vunpcklps_avx ; } # VBROADCAST 5-12 PAGE 1836 LINE 94913 -define pcodeop vbroadcastsd_avx ; :VBROADCASTSD YmmReg1, XmmReg2_m64 is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x19; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 { local val:8 = XmmReg2_m64[0,64]; @@ -2839,7 +2838,6 @@ define pcodeop vbroadcastsd_avx ; } # VBROADCAST 5-12 PAGE 1836 LINE 94915 -define pcodeop vbroadcastf128_avx ; :VBROADCASTF128 YmmReg1, XmmReg2_m128 is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1A; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 { local val:16 = XmmReg2_m128; diff --git a/Ghidra/Processors/x86/data/languages/avx512.sinc b/Ghidra/Processors/x86/data/languages/avx512.sinc index 0791af6105..a882bd1061 100644 --- a/Ghidra/Processors/x86/data/languages/avx512.sinc +++ b/Ghidra/Processors/x86/data/languages/avx512.sinc @@ -4,237 +4,303 @@ # ADDPD 3-33 PAGE 603 LINE 33411 define pcodeop vaddpd_avx512vl ; -:VADDPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VADDPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vaddpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vaddpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + + ZmmReg1 = zext(XmmResult); } # ADDPD 3-33 PAGE 603 LINE 33414 -:VADDPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x58; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VADDPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x58; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vaddpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vaddpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + + ZmmReg1 = zext(YmmResult); } # ADDPD 3-33 PAGE 603 LINE 33417 define pcodeop vaddpd_avx512f ; -:VADDPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x58; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VADDPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x58; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vaddpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vaddpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + + ZmmReg1 = ZmmResult; } # ADDPS 3-36 PAGE 606 LINE 33562 define pcodeop vaddps_avx512vl ; -:VADDPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VADDPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vaddps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vaddps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + + ZmmReg1 = zext(XmmResult); } # ADDPS 3-36 PAGE 606 LINE 33565 -:VADDPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x58; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VADDPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x58; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vaddps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vaddps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst); + YmmMask = YmmReg1; + build YmmOpMask32; + + ZmmReg1 = zext(YmmResult); } # ADDPS 3-36 PAGE 606 LINE 33568 define pcodeop vaddps_avx512f ; -:VADDPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x58; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VADDPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x58; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vaddps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vaddps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + + ZmmReg1 = ZmmResult; } # ADDSD 3-39 PAGE 609 LINE 33721 define pcodeop vaddsd_avx512f ; -:VADDSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VADDSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vaddsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vaddsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + + ZmmReg1 = zext(XmmResult); } # ADDSS 3-41 PAGE 611 LINE 33815 define pcodeop vaddss_avx512f ; -:VADDSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VADDSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x58; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vaddss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vaddss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + + ZmmReg1 = zext(XmmResult); } # ANDPD 3-64 PAGE 634 LINE 34827 define pcodeop vandpd_avx512vl ; -:VANDPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VANDPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vandpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vandpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + + ZmmReg1 = zext(XmmResult); } # ANDPD 3-64 PAGE 634 LINE 34830 -:VANDPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VANDPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vandpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vandpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + + ZmmReg1 = zext(YmmResult); } # ANDPD 3-64 PAGE 634 LINE 34833 define pcodeop vandpd_avx512dq ; -:VANDPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x54; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VANDPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x54; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vandpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vandpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + + ZmmReg1 = ZmmResult; } # ANDPS 3-67 PAGE 637 LINE 34953 define pcodeop vandps_avx512vl ; -:VANDPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VANDPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vandps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vandps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + + ZmmReg1 = zext(XmmResult); } # ANDPS 3-67 PAGE 637 LINE 34956 -:VANDPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VANDPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vandps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vandps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + + ZmmReg1 = zext(YmmResult); } # ANDPS 3-67 PAGE 637 LINE 34959 define pcodeop vandps_avx512dq ; -:VANDPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x54; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VANDPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x54; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vandps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vandps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # ANDNPD 3-70 PAGE 640 LINE 35087 define pcodeop vandnpd_avx512vl ; -:VANDNPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VANDNPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vandnpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vandnpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # ANDNPD 3-70 PAGE 640 LINE 35090 -:VANDNPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x55; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VANDNPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x55; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vandnpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vandnpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # ANDNPD 3-70 PAGE 640 LINE 35093 define pcodeop vandnpd_avx512dq ; -:VANDNPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x55; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VANDNPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x55; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vandnpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vandnpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # ANDNPS 3-73 PAGE 643 LINE 35213 define pcodeop vandnps_avx512vl ; -:VANDNPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VANDNPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vandnps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vandnps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # ANDNPS 3-73 PAGE 643 LINE 35216 -:VANDNPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x55; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VANDNPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x55; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vandnps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vandnps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # ANDNPS 3-73 PAGE 643 LINE 35219 define pcodeop vandnps_avx512dq ; -:VANDNPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x55; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VANDNPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x55; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vandnps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vandnps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # CMPPD 3-155 PAGE 725 LINE 39246 define pcodeop vcmppd_avx512vl ; -:^VCMPPD_mon KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst^VCMPPD_op is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... & XmmReg2_m128_m64bcst; VCMPPD_mon & VCMPPD_op +:^VCMPPD_mon KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst^VCMPPD_op is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... 
& XmmReg2_m128_m64bcst; VCMPPD_mon & VCMPPD_op [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vcmppd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, VCMPPD_op ); + local tmp = vcmppd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, VCMPPD_op ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # CMPPD 3-155 PAGE 725 LINE 39250 -:^VCMPPD_mon KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst^VCMPPD_op is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xC2; KReg_reg ... & YmmReg2_m256_m64bcst; VCMPPD_mon & VCMPPD_op +:^VCMPPD_mon KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst^VCMPPD_op is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0xC2; KReg_reg ... & YmmReg2_m256_m64bcst; VCMPPD_mon & VCMPPD_op [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vcmppd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, VCMPPD_op ); + local tmp = vcmppd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, VCMPPD_op ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # CMPPD 3-155 PAGE 725 LINE 39254 define pcodeop vcmppd_avx512f ; -:^VCMPPD_mon KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst^VCMPPD_op is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xC2; KReg_reg ... & ZmmReg2_m512_m64bcst; VCMPPD_mon & VCMPPD_op +:^VCMPPD_mon KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst^VCMPPD_op is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0xC2; KReg_reg ... 
& ZmmReg2_m512_m64bcst; VCMPPD_mon & VCMPPD_op [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vcmppd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, VCMPPD_op ); + local tmp = vcmppd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, VCMPPD_op ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # CMPPS 3-162 PAGE 732 LINE 39613 define pcodeop vcmpps_avx512vl ; -:^VCMPPS_mon KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst^VCMPPS_op is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... & XmmReg2_m128_m32bcst; VCMPPS_mon & VCMPPS_op +:^VCMPPS_mon KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst^VCMPPS_op is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... & XmmReg2_m128_m32bcst; VCMPPS_mon & VCMPPS_op [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vcmpps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, VCMPPS_op ); + local tmp = vcmpps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, VCMPPS_op ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # CMPPS 3-162 PAGE 732 LINE 39617 -:^VCMPPS_mon KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst^VCMPPS_op is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xC2; KReg_reg ... & YmmReg2_m256_m32bcst; VCMPPS_mon & VCMPPS_op +:^VCMPPS_mon KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst^VCMPPS_op is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0xC2; KReg_reg ... 
& YmmReg2_m256_m32bcst; VCMPPS_mon & VCMPPS_op [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vcmpps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, VCMPPS_op ); + local tmp = vcmpps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, VCMPPS_op ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # CMPPS 3-162 PAGE 732 LINE 39621 define pcodeop vcmpps_avx512f ; -:^VCMPPS_mon KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst^VCMPPS_op is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xC2; KReg_reg ... & ZmmReg2_m512_m32bcst; VCMPPS_mon & VCMPPS_op +:^VCMPPS_mon KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst^VCMPPS_op is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0xC2; KReg_reg ... & ZmmReg2_m512_m32bcst; VCMPPS_mon & VCMPPS_op [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vcmpps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, VCMPPS_op ); + local tmp = vcmpps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, VCMPPS_op ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # CMPSD 3-173 PAGE 743 LINE 40157 define pcodeop vcmpsd_avx512f ; -:^VCMPSD_mon KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64^VCMPSD_op is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... & XmmReg2_m64; VCMPSD_mon & VCMPSD_op +:^VCMPSD_mon KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m64^VCMPSD_op is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... 
& XmmReg2_m64; VCMPSD_mon & VCMPSD_op [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - KReg_reg = vcmpsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64, VCMPSD_op ); + local tmp = vcmpsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64, VCMPSD_op ); + KReg_reg = zext(AVXOpMask[0,1]) & tmp; } # CMPSS 3-177 PAGE 747 LINE 40393 define pcodeop vcmpss_avx512f ; -:^VCMPSS_mon KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32^VCMPSS_op is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... & XmmReg2_m32; VCMPSS_mon & VCMPSS_op +:^VCMPSS_mon KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m32^VCMPSS_op is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0xC2; KReg_reg ... & XmmReg2_m32; VCMPSS_mon & VCMPSS_op [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - KReg_reg = vcmpss_avx512f( vexVVVV_XmmReg, XmmReg2_m32, VCMPSS_op ); + local tmp = vcmpss_avx512f( vexVVVV_XmmReg, XmmReg2_m32, VCMPSS_op ); + KReg_reg = zext(AVXOpMask[0,1]) & tmp; } # COMISD 3-186 PAGE 756 LINE 40863 define pcodeop vcomisd_avx512f ; -:VCOMISD XmmReg1, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VCOMISD XmmReg1, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcomisd_avx512f( XmmReg2_m64 ); @@ -243,7 +309,7 @@ define pcodeop vcomisd_avx512f ; # COMISS 3-188 PAGE 758 LINE 40941 define pcodeop vcomiss_avx512f ; -:VCOMISS XmmReg1, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VCOMISS XmmReg1, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2F; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcomiss_avx512f( XmmReg2_m32 ); @@ -252,159 +318,199 @@ define pcodeop vcomiss_avx512f ; # CVTDQ2PD 3-228 PAGE 798 LINE 43080 define pcodeop vcvtdq2pd_avx512vl ; -:VCVTDQ2PD XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0xE6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTDQ2PD XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0xE6; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvtdq2pd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtdq2pd_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # CVTDQ2PD 3-228 PAGE 798 LINE 43083 -:VCVTDQ2PD YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0xE6; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTDQ2PD YmmReg1 YmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0xE6; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvtdq2pd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtdq2pd_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # CVTDQ2PD 3-228 PAGE 798 LINE 43086 define pcodeop vcvtdq2pd_avx512f ; -:VCVTDQ2PD ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0xE6; ZmmReg1 ... 
& YmmReg2_m256_m32bcst +:VCVTDQ2PD ZmmReg1 ZmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0xE6; (ZmmReg1 & ZmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvtdq2pd_avx512f( YmmReg2_m256_m32bcst ); + ZmmResult = vcvtdq2pd_avx512f( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # CVTDQ2PS 3-232 PAGE 802 LINE 43248 define pcodeop vcvtdq2ps_avx512vl ; -:VCVTDQ2PS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTDQ2PS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtdq2ps_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtdq2ps_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # CVTDQ2PS 3-232 PAGE 802 LINE 43251 -:VCVTDQ2PS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VCVTDQ2PS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtdq2ps_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtdq2ps_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # CVTDQ2PS 3-232 PAGE 802 LINE 43254 define pcodeop vcvtdq2ps_avx512f ; -:VCVTDQ2PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VCVTDQ2PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtdq2ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vcvtdq2ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # CVTPD2DQ 3-235 PAGE 805 LINE 43414 define pcodeop vcvtpd2dq_avx512vl ; -:VCVTPD2DQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTPD2DQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2dq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2dq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # CVTPD2DQ 3-235 PAGE 805 LINE 43417 -:VCVTPD2DQ XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (XmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst +:VCVTPD2DQ XmmReg1 XmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2dq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2dq_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # CVTPD2DQ 3-235 PAGE 805 LINE 43420 define pcodeop vcvtpd2dq_avx512f ; -:VCVTPD2DQ YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (YmmReg1 & ZmmReg1) ... & ZmmReg2_m512_m64bcst +:VCVTPD2DQ YmmReg1 YmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtpd2dq_avx512f( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtpd2dq_avx512f( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # CVTPD2PS 3-240 PAGE 810 LINE 43649 define pcodeop vcvtpd2ps_avx512vl ; -:VCVTPD2PS XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x5A; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2ps_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2ps_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # CVTPD2PS 3-240 PAGE 810 LINE 43653 -:VCVTPD2PS XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x5A; (XmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTPD2PS XmmReg1 XmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x5A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2ps_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2ps_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # CVTPD2PS 3-240 PAGE 810 LINE 43657 define pcodeop vcvtpd2ps_avx512f ; -:VCVTPD2PS YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x5A; (YmmReg1 & ZmmReg1) ... & ZmmReg2_m512_m64bcst +:VCVTPD2PS YmmReg1 YmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x5A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtpd2ps_avx512f( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtpd2ps_avx512f( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # CVTPS2DQ 3-246 PAGE 816 LINE 43933 define pcodeop vcvtps2dq_avx512vl ; -:VCVTPS2DQ XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128_m32bcst +:VCVTPS2DQ XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtps2dq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtps2dq_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # CVTPS2DQ 3-246 PAGE 816 LINE 43936 -:VCVTPS2DQ YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VCVTPS2DQ YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtps2dq_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtps2dq_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # CVTPS2DQ 3-246 PAGE 816 LINE 43939 define pcodeop vcvtps2dq_avx512f ; -:VCVTPS2DQ ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VCVTPS2DQ ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtps2dq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vcvtps2dq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # CVTPS2PD 3-249 PAGE 819 LINE 44104 define pcodeop vcvtps2pd_avx512vl ; -:VCVTPS2PD XmmReg1^KWriteMask, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64_m32bcst +:VCVTPS2PD XmmReg1 XmmOpMask64, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x5A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvtps2pd_avx512vl( XmmReg2_m64_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtps2pd_avx512vl( XmmReg2_m64_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # CVTPS2PD 3-249 PAGE 819 LINE 44107 -:VCVTPS2PD YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5A; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTPS2PD YmmReg1 YmmOpMask64, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x5A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvtps2pd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtps2pd_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # CVTPS2PD 3-249 PAGE 819 LINE 44110 define pcodeop vcvtps2pd_avx512f ; -:VCVTPS2PD ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5A; ZmmReg1 ... 
& YmmReg2_m256_m32bcst +:VCVTPS2PD ZmmReg1 ZmmOpMask64, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x5A; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvtps2pd_avx512f( YmmReg2_m256_m32bcst ); + ZmmResult = vcvtps2pd_avx512f( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # CVTSD2SI 3-253 PAGE 823 LINE 44320 define pcodeop vcvtsd2si_avx512f ; -:VCVTSD2SI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x2D; Reg32 ... & XmmReg2_m64 +:VCVTSD2SI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x2D; Reg32 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvtsd2si_avx512f( XmmReg2_m64 ); @@ -413,7 +519,7 @@ define pcodeop vcvtsd2si_avx512f ; # CVTSD2SI 3-253 PAGE 823 LINE 44322 @ifdef IA64 -:VCVTSD2SI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x2D; Reg64 ... & XmmReg2_m64 +:VCVTSD2SI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x2D; Reg64 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvtsd2si_avx512f( XmmReg2_m64 ); @@ -422,16 +528,19 @@ define pcodeop vcvtsd2si_avx512f ; # CVTSD2SS 3-255 PAGE 825 LINE 44417 define pcodeop vcvtsd2ss_avx512f ; -:VCVTSD2SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VCVTSD2SS XmmReg1 XmmOpMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5A; (XmmReg1 & ZmmReg1 & XmmOpMask) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vcvtsd2ss_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtsd2ss_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask; + XmmResult[0,32] = (zext(XmmOpMask[0,1]) * XmmResult[0,32]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,32]); + ZmmReg1 = zext(XmmResult); } # CVTSI2SD 3-257 PAGE 827 LINE 44522 define pcodeop vcvtsi2sd_avx512f ; -:VCVTSI2SD XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm32 +:VCVTSI2SD XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtsi2sd_avx512f( vexVVVV_XmmReg, rm32 ); @@ -440,7 +549,7 @@ define pcodeop vcvtsi2sd_avx512f ; # CVTSI2SD 3-257 PAGE 827 LINE 44525 @ifdef IA64 -:VCVTSI2SD XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm64 +:VCVTSI2SD XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtsi2sd_avx512f( vexVVVV_XmmReg, rm64 ); @@ -450,7 +559,7 @@ define pcodeop vcvtsi2sd_avx512f ; # CVTSI2SS 3-259 PAGE 829 LINE 44636 define pcodeop vcvtsi2ss_avx512f ; -:VCVTSI2SS XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm32 +:VCVTSI2SS XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... 
& rm32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtsi2ss_avx512f( vexVVVV_XmmReg, rm32 ); @@ -459,7 +568,7 @@ define pcodeop vcvtsi2ss_avx512f ; # CVTSI2SS 3-259 PAGE 829 LINE 44638 @ifdef IA64 -:VCVTSI2SS XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm64 +:VCVTSI2SS XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x2A; (XmmReg1 & ZmmReg1) ... & rm64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtsi2ss_avx512f( vexVVVV_XmmReg, rm64 ); @@ -469,16 +578,19 @@ define pcodeop vcvtsi2ss_avx512f ; # CVTSS2SD 3-261 PAGE 831 LINE 44747 define pcodeop vcvtss2sd_avx512f ; -:VCVTSS2SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VCVTSS2SD XmmReg1 XmmOpMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5A; (XmmReg1 & ZmmReg1 & XmmOpMask) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vcvtss2sd_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtss2sd_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask; + XmmResult[0,64] = (zext(XmmOpMask[0,1]) * XmmResult[0,64]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,64]); + ZmmReg1 = zext(XmmResult); } # CVTSS2SI 3-263 PAGE 833 LINE 44839 define pcodeop vcvtss2si_avx512f ; -:VCVTSS2SI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x2D; Reg32 ... & XmmReg2_m32 +:VCVTSS2SI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x2D; Reg32 ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvtss2si_avx512f( XmmReg2_m32 ); @@ -487,7 +599,7 @@ define pcodeop vcvtss2si_avx512f ; # CVTSS2SI 3-263 PAGE 833 LINE 44841 @ifdef IA64 -:VCVTSS2SI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x2D; Reg64 ... & XmmReg2_m32 +:VCVTSS2SI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x2D; Reg64 ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvtss2si_avx512f( XmmReg2_m32 ); @@ -496,58 +608,71 @@ define pcodeop vcvtss2si_avx512f ; # CVTTPD2DQ 3-265 PAGE 835 LINE 44936 define pcodeop vcvttpd2dq_avx512vl ; -:VCVTTPD2DQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTTPD2DQ XmmReg1 XmmOpMask32, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttpd2dq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttpd2dq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # CVTTPD2DQ 3-265 PAGE 835 LINE 44940 -:VCVTTPD2DQ XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (XmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTTPD2DQ XmmReg1 XmmOpMask32, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttpd2dq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttpd2dq_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # CVTTPD2DQ 3-265 PAGE 835 LINE 44944 define pcodeop vcvttpd2dq_avx512f ; -:VCVTTPD2DQ YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (YmmReg1 & ZmmReg1) ... & ZmmReg2_m512_m64bcst +:VCVTTPD2DQ YmmReg1 YmmOpMask32, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvttpd2dq_avx512f( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttpd2dq_avx512f( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # CVTTPS2DQ 3-270 PAGE 840 LINE 45169 define pcodeop vcvttps2dq_avx512vl ; -:VCVTTPS2DQ XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTTPS2DQ XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttps2dq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttps2dq_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # CVTTPS2DQ 3-270 PAGE 840 LINE 45173 -:VCVTTPS2DQ YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m32bcst +:VCVTTPS2DQ YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvttps2dq_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttps2dq_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # CVTTPS2DQ 3-270 PAGE 840 LINE 45177 define pcodeop vcvttps2dq_avx512f ; -:VCVTTPS2DQ ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x5B; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VCVTTPS2DQ ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x5B; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvttps2dq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vcvttps2dq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # CVTTSD2SI 3-274 PAGE 844 LINE 45385 define pcodeop vcvttsd2si_avx512f ; -:VCVTTSD2SI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x2C; Reg32 ... & XmmReg2_m64 +:VCVTTSD2SI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x2C; Reg32 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvttsd2si_avx512f( XmmReg2_m64 ); @@ -556,7 +681,7 @@ define pcodeop vcvttsd2si_avx512f ; # CVTTSD2SI 3-274 PAGE 844 LINE 45388 @ifdef IA64 -:VCVTTSD2SI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x2C; Reg64 ... & XmmReg2_m64 +:VCVTTSD2SI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x2C; Reg64 ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvttsd2si_avx512f( XmmReg2_m64 ); @@ -565,7 +690,7 @@ define pcodeop vcvttsd2si_avx512f ; # CVTTSS2SI 3-276 PAGE 846 LINE 45479 define pcodeop vcvttss2si_avx512f ; -:VCVTTSS2SI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x2C; Reg32 ... & XmmReg2_m32 +:VCVTTSS2SI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x2C; Reg32 ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvttss2si_avx512f( XmmReg2_m32 ); @@ -574,7 +699,7 @@ define pcodeop vcvttss2si_avx512f ; # CVTTSS2SI 3-276 PAGE 846 LINE 45482 @ifdef IA64 -:VCVTTSS2SI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x2C; Reg64 ... & XmmReg2_m32 +:VCVTTSS2SI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x2C; Reg64 ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvttss2si_avx512f( XmmReg2_m32 ); @@ -583,75 +708,93 @@ define pcodeop vcvttss2si_avx512f ; # DIVPD 3-288 PAGE 858 LINE 46029 define pcodeop vdivpd_avx512vl ; -:VDIVPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VDIVPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vdivpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vdivpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # DIVPD 3-288 PAGE 858 LINE 46033 -:VDIVPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x5E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VDIVPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x5E; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vdivpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vdivpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # DIVPD 3-288 PAGE 858 LINE 46037 define pcodeop vdivpd_avx512f ; -:VDIVPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x5E; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VDIVPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x5E; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vdivpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vdivpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # DIVPS 3-291 PAGE 861 LINE 46170 define pcodeop vdivps_avx512vl ; -:VDIVPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VDIVPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vdivps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vdivps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # DIVPS 3-291 PAGE 861 LINE 46174 -:VDIVPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x5E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VDIVPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x5E; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vdivps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vdivps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # DIVPS 3-291 PAGE 861 LINE 46178 define pcodeop vdivps_avx512f ; -:VDIVPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x5E; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VDIVPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x5E; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vdivps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vdivps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # DIVSD 3-294 PAGE 864 LINE 46315 define pcodeop vdivsd_avx512f ; -:VDIVSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VDIVSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vdivsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vdivsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # DIVSS 3-296 PAGE 866 LINE 46413 define pcodeop vdivss_avx512f ; -:VDIVSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VDIVSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5E; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vdivss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vdivss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # EXTRACTPS 3-307 PAGE 877 LINE 46983 define pcodeop vextractps_avx512f ; -:VEXTRACTPS rm32, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x17; XmmReg1 ... & rm32; imm8 +:VEXTRACTPS rm32, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x17; XmmReg1 ... & rm32; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { rm32 = vextractps_avx512f( XmmReg1, imm8:1 ); @@ -659,682 +802,473 @@ define pcodeop vextractps_avx512f ; # INSERTPS 3-454 PAGE 1024 LINE 53785 define pcodeop vinsertps_avx512f ; -:VINSERTPS XmmReg1, vexVVVV_XmmReg, XmmReg2_m32, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x21; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m32; imm8 +:VINSERTPS XmmReg1, vexVVVV_XmmReg, XmmReg2_m32, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x21; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vinsertps_avx512f( vexVVVV_XmmReg, XmmReg2_m32, imm8:1 ); ZmmReg1 = zext(tmp); } -# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55984 -define pcodeop kaddw_avx512dq ; -:KADDW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm -{ - KReg_reg = kaddw_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55986 -define pcodeop kaddb_avx512dq ; -:KADDB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm -{ - KReg_reg = kaddb_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55988 -define pcodeop kaddq_avx512bw ; -:KADDQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm -{ - KReg_reg = kaddq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55990 -define pcodeop kaddd_avx512bw ; -:KADDD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm -{ - KReg_reg = kaddd_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56039 -define pcodeop kandw_avx512f ; -:KANDW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm -{ - KReg_reg = kandw_avx512f( vex1VVV_KReg, KReg_rm ); -} - -# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56041 -define pcodeop kandb_avx512dq ; -:KANDB KReg_reg, 
vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm -{ - KReg_reg = kandb_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56043 -define pcodeop kandq_avx512bw ; -:KANDQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm -{ - KReg_reg = kandq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56045 -define pcodeop kandd_avx512bw ; -:KANDD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm -{ - KReg_reg = kandd_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56100 -define pcodeop kandnw_avx512f ; -:KANDNW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm -{ - KReg_reg = kandnw_avx512f( vex1VVV_KReg, KReg_rm ); -} - -# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56102 -define pcodeop kandnb_avx512dq ; -:KANDNB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm -{ - KReg_reg = kandnb_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56104 -define pcodeop kandnq_avx512bw ; -:KANDNQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm -{ - KReg_reg = kandnq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56106 -define pcodeop kandnd_avx512bw ; -:KANDND KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm -{ - 
KReg_reg = kandnd_avx512bw( vex1VVV_KReg, KReg_rm ); -} - - - -# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56266 -define pcodeop knotw_avx512f ; -:KNOTW KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x44; KReg_reg & KReg_rm -{ - KReg_reg = knotw_avx512f( KReg_rm ); -} - -# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56268 -define pcodeop knotb_avx512dq ; -:KNOTB KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x44; KReg_reg & KReg_rm -{ - KReg_reg = knotb_avx512dq( KReg_rm ); -} - -# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56270 -define pcodeop knotq_avx512bw ; -:KNOTQ KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x44; KReg_reg & KReg_rm -{ - KReg_reg = knotq_avx512bw( KReg_rm ); -} - -# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56272 -define pcodeop knotd_avx512bw ; -:KNOTD KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x44; KReg_reg & KReg_rm -{ - KReg_reg = knotd_avx512bw( KReg_rm ); -} - -# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56325 -define pcodeop korw_avx512f ; -:KORW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm -{ - KReg_reg = korw_avx512f( vex1VVV_KReg, KReg_rm ); -} - -# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56327 -define pcodeop korb_avx512dq ; -:KORB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm -{ - KReg_reg = korb_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56329 -define pcodeop korq_avx512bw ; -:KORQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm -{ - KReg_reg = korq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - 
-# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56331 -define pcodeop kord_avx512bw ; -:KORD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm -{ - KReg_reg = kord_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56385 -define pcodeop kortestw_avx512f ; -:KORTESTW KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x98; KReg_reg & KReg_rm -{ - KReg_reg = kortestw_avx512f( KReg_rm ); - # TODO set flags AF, CF, OF, OR, PF, SF, ZF -} - -# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56387 -define pcodeop kortestb_avx512dq ; -:KORTESTB KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x98; KReg_reg & KReg_rm -{ - KReg_reg = kortestb_avx512dq( KReg_rm ); - # TODO set flags AF, CF, OF, OR, PF, SF, ZF -} - -# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56389 -define pcodeop kortestq_avx512bw ; -:KORTESTQ KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x98; KReg_reg & KReg_rm -{ - KReg_reg = kortestq_avx512bw( KReg_rm ); - # TODO set flags AF, CF, OF, OR, PF, SF, ZF -} - -# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56391 -define pcodeop kortestd_avx512bw ; -:KORTESTD KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x98; KReg_reg & KReg_rm -{ - KReg_reg = kortestd_avx512bw( KReg_rm ); - # TODO set flags AF, CF, OF, OR, PF, SF, ZF -} - -# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56481 -define pcodeop kshiftlw_avx512f ; -:KSHIFTLW KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x32; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftlw_avx512f( KReg_rm, imm8:1 ); -} - -# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56483 -define pcodeop 
kshiftlb_avx512dq ; -:KSHIFTLB KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x32; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftlb_avx512dq( KReg_rm, imm8:1 ); -} - -# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56485 -define pcodeop kshiftlq_avx512bw ; -:KSHIFTLQ KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x33; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftlq_avx512bw( KReg_rm, imm8:1 ); -} - -# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56487 -define pcodeop kshiftld_avx512bw ; -:KSHIFTLD KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x33; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftld_avx512bw( KReg_rm, imm8:1 ); -} - -# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56562 -define pcodeop kshiftrw_avx512f ; -:KSHIFTRW KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x30; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftrw_avx512f( KReg_rm, imm8:1 ); -} - -# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56564 -define pcodeop kshiftrb_avx512dq ; -:KSHIFTRB KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x30; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftrb_avx512dq( KReg_rm, imm8:1 ); -} - -# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56566 -define pcodeop kshiftrq_avx512bw ; -:KSHIFTRQ KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x31; KReg_reg & KReg_rm; imm8 -{ - KReg_reg = kshiftrq_avx512bw( KReg_rm, imm8:1 ); -} - -# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56568 -define pcodeop kshiftrd_avx512bw ; -:KSHIFTRD KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x31; KReg_reg & KReg_rm; imm8 -{ - KReg_reg 
= kshiftrd_avx512bw( KReg_rm, imm8:1 ); -} - -# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56643 -define pcodeop ktestw_avx512dq ; -:KTESTW KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x99; KReg_reg & KReg_rm -{ - ktestw_avx512dq( KReg_reg, KReg_rm ); - # TODO missing destination or side effects -} - -# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56645 -define pcodeop ktestb_avx512dq ; -:KTESTB KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x99; KReg_reg & KReg_rm -{ - ktestb_avx512dq( KReg_reg, KReg_rm ); - # TODO missing destination or side effects -} - -# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56647 -define pcodeop ktestq_avx512bw ; -:KTESTQ KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x99; KReg_reg & KReg_rm -{ - ktestq_avx512bw( KReg_reg, KReg_rm ); - # TODO missing destination or side effects -} - -# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56649 -define pcodeop ktestd_avx512bw ; -:KTESTD KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x99; KReg_reg & KReg_rm -{ - ktestd_avx512bw( KReg_reg, KReg_rm ); - # TODO missing destination or side effects -} - -# KUNPCKBW/KUNPCKWD/KUNPCKDQ 3-511 PAGE 1081 LINE 56747 -define pcodeop kunpckbw_avx512f ; -:KUNPCKBW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4B; KReg_reg & KReg_rm -{ - KReg_reg = kunpckbw_avx512f( vex1VVV_KReg, KReg_rm ); -} - -# KUNPCKBW/KUNPCKWD/KUNPCKDQ 3-511 PAGE 1081 LINE 56749 -define pcodeop kunpckwd_avx512bw ; -:KUNPCKWD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4B; KReg_reg & KReg_rm -{ - KReg_reg = kunpckwd_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KUNPCKBW/KUNPCKWD/KUNPCKDQ 3-511 PAGE 1081 LINE 56751 -define pcodeop 
kunpckdq_avx512bw ; -:KUNPCKDQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x4B; KReg_reg & KReg_rm -{ - KReg_reg = kunpckdq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56806 -define pcodeop kxnorw_avx512f ; -:KXNORW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm -{ - KReg_reg = kxnorw_avx512f( vex1VVV_KReg, KReg_rm ); -} - -# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56808 -define pcodeop kxnorb_avx512dq ; -:KXNORB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm -{ - KReg_reg = kxnorb_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56810 -define pcodeop kxnorq_avx512bw ; -:KXNORQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm -{ - KReg_reg = kxnorq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56812 -define pcodeop kxnord_avx512bw ; -:KXNORD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm -{ - KReg_reg = kxnord_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56866 -define pcodeop kxorw_avx512f ; -:KXORW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm -{ - KReg_reg = kxorw_avx512f( vex1VVV_KReg, KReg_rm ); -} - -# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56868 -define pcodeop kxorb_avx512dq ; -:KXORB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & 
vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm -{ - KReg_reg = kxorb_avx512dq( vex1VVV_KReg, KReg_rm ); -} - -# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56870 -define pcodeop kxorq_avx512bw ; -:KXORQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm -{ - KReg_reg = kxorq_avx512bw( vex1VVV_KReg, KReg_rm ); -} - -# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56872 -define pcodeop kxord_avx512bw ; -:KXORD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm -{ - KReg_reg = kxord_avx512bw( vex1VVV_KReg, KReg_rm ); -} # MAXPD 4-12 PAGE 1132 LINE 59206 define pcodeop vmaxpd_avx512vl ; -:VMAXPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VMAXPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vmaxpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vmaxpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MAXPD 4-12 PAGE 1132 LINE 59210 -:VMAXPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x5F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VMAXPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x5F; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vmaxpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vmaxpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MAXPD 4-12 PAGE 1132 LINE 59214 define pcodeop vmaxpd_avx512f ; -:VMAXPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x5F; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VMAXPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x5F; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vmaxpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vmaxpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MAXPS 4-15 PAGE 1135 LINE 59356 define pcodeop vmaxps_avx512vl ; -:VMAXPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VMAXPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vmaxps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vmaxps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MAXPS 4-15 PAGE 1135 LINE 59359 -:VMAXPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x5F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VMAXPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x5F; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vmaxps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vmaxps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MAXPS 4-15 PAGE 1135 LINE 59362 define pcodeop vmaxps_avx512f ; -:VMAXPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x5F; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VMAXPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x5F; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vmaxps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vmaxps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MAXSD 4-18 PAGE 1138 LINE 59506 define pcodeop vmaxsd_avx512f ; -:VMAXSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VMAXSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vmaxsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vmaxsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MAXSS 4-20 PAGE 1140 LINE 59609 define pcodeop vmaxss_avx512f ; -:VMAXSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VMAXSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vmaxss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vmaxss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MINPD 4-23 PAGE 1143 LINE 59771 define pcodeop vminpd_avx512vl ; -:VMINPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VMINPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vminpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vminpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MINPD 4-23 PAGE 1143 LINE 59774 -:VMINPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x5D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VMINPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x5D; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vminpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vminpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MINPD 4-23 PAGE 1143 LINE 59777 define pcodeop vminpd_avx512f ; -:VMINPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x5D; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VMINPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x5D; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vminpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vminpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MINPS 4-26 PAGE 1146 LINE 59915 define pcodeop vminps_avx512vl ; -:VMINPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VMINPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vminps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vminps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MINPS 4-26 PAGE 1146 LINE 59918 -:VMINPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x5D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VMINPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x5D; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vminps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vminps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MINPS 4-26 PAGE 1146 LINE 59921 define pcodeop vminps_avx512f ; -:VMINPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x5D; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VMINPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x5D; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vminps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vminps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MINSD 4-29 PAGE 1149 LINE 60063 define pcodeop vminsd_avx512f ; -:VMINSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VMINSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vminsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vminsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MINSS 4-31 PAGE 1151 LINE 60166 define pcodeop vminss_avx512f ; -:VMINSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VMINSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5D; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vminss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vminss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVAPD 4-45 PAGE 1165 LINE 60852 -define pcodeop vmovapd_avx512vl ; -:VMOVAPD XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x28; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVAPD XmmReg1 XmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x28; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovapd_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = XmmReg2_m128 ; + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MOVAPD 4-45 PAGE 1165 LINE 60855 -:VMOVAPD YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x28; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVAPD YmmReg1 YmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x28; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovapd_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = YmmReg2_m256; + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MOVAPD 4-45 PAGE 1165 LINE 60858 define pcodeop vmovapd_avx512f ; -:VMOVAPD ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x28; ZmmReg1 ... & ZmmReg2_m512 +:VMOVAPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x28; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovapd_avx512f( ZmmReg2_m512 ); + ZmmResult = ZmmReg2_m512; + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } -# MOVAPD 4-45 PAGE 1165 LINE 60861 -:VMOVAPD XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x29; XmmReg1 ... & XmmReg2_m128 +:VMOVAPD XmmReg2 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & XmmOpMask64; byte=0x29; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - XmmReg2_m128 = vmovapd_avx512vl( XmmReg1 ); + XmmResult = XmmReg1 ; + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); +} + +:VMOVAPD m128 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & XmmOpMask64; byte=0x29; XmmReg1 ... & m128 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + XmmResult = XmmReg1 ; + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; } # MOVAPD 4-45 PAGE 1165 LINE 60864 -:VMOVAPD YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x29; YmmReg1 ... & YmmReg2_m256 +:VMOVAPD YmmReg2 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & YmmOpMask64; byte=0x29; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - YmmReg2_m256 = vmovapd_avx512vl( YmmReg1 ); + YmmResult = YmmReg1 ; + YmmMask = YmmReg2; + build YmmOpMask64; + ZmmReg2 = zext(YmmResult); +} + +:VMOVAPD m256 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & YmmOpMask64; byte=0x29; YmmReg1 ... 
& m256 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + YmmResult = YmmReg1 ; + YmmMask = m256; + build YmmOpMask64; + m256 = YmmResult; } # MOVAPD 4-45 PAGE 1165 LINE 60867 -:VMOVAPD ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x29; ZmmReg1 ... & ZmmReg2_m512 +:VMOVAPD ZmmReg2 ZmmOpMask64, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & ZmmOpMask64; byte=0x29; ZmmReg1 & mod=3 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - ZmmReg2_m512 = vmovapd_avx512f( ZmmReg1 ); + ZmmResult = ZmmReg1 ; + ZmmMask = ZmmReg2; + build ZmmOpMask64; + ZmmReg2 = ZmmResult; +} + +:VMOVAPD m512 ZmmOpMask64, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & ZmmOpMask64; byte=0x29; ZmmReg1 ... & m512 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + ZmmResult = ZmmReg1 ; + ZmmMask = m512; + build ZmmOpMask64; + m512 = ZmmResult; } # MOVAPS 4-49 PAGE 1169 LINE 61047 -define pcodeop vmovaps_avx512vl ; -:VMOVAPS XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x28; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVAPS XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x28; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovaps_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = XmmReg2_m128; + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVAPS 4-49 PAGE 1169 LINE 61050 -:VMOVAPS YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x28; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256 +:VMOVAPS YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x28; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovaps_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = YmmReg2_m256; + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MOVAPS 4-49 PAGE 1169 LINE 61053 define pcodeop vmovaps_avx512f ; -:VMOVAPS ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x28; ZmmReg1 ... & ZmmReg2_m512 +:VMOVAPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x28; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovaps_avx512f( ZmmReg2_m512 ); + ZmmResult = ZmmReg2_m512 ; + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MOVAPS 4-49 PAGE 1169 LINE 61056 -:VMOVAPS XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x29; XmmReg1 ... & XmmReg2_m128 +:VMOVAPS XmmReg2 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & XmmOpMask32; byte=0x29; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - XmmReg2_m128 = vmovaps_avx512vl( XmmReg1 ); + XmmResult = XmmReg1; + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); +} + +:VMOVAPS m128 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & XmmOpMask32; byte=0x29; (XmmReg1) ... 
& m128 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + XmmResult = XmmReg1; + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # MOVAPS 4-49 PAGE 1169 LINE 61059 -:VMOVAPS YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x29; YmmReg1 ... & YmmReg2_m256 +:VMOVAPS YmmReg2 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & YmmOpMask32; byte=0x29; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - YmmReg2_m256 = vmovaps_avx512vl( YmmReg1 ); + YmmResult = YmmReg1; + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); +} + +:VMOVAPS m256 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x29; (YmmReg1 & YmmOpMask32) ... & m256 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + YmmResult = YmmReg1; + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # MOVAPS 4-49 PAGE 1169 LINE 61062 -:VMOVAPS ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x29; ZmmReg1 ... & ZmmReg2_m512 +:VMOVAPS ZmmReg2 ZmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x29; ZmmReg1 & mod=3 & ZmmOpMask32 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - ZmmReg2_m512 = vmovaps_avx512f( ZmmReg1 ); + ZmmResult = ZmmReg1 ; + ZmmMask = ZmmReg2; + build ZmmOpMask32; + ZmmReg2 = ZmmResult; +} + +:VMOVAPS m512 ZmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x29; (ZmmReg1 & ZmmOpMask32) ... 
& m512 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + ZmmResult = ZmmReg1 ; + ZmmMask = m512; + build ZmmOpMask32; + m512 = ZmmResult; } # MOVD/MOVQ 4-55 PAGE 1175 LINE 61366 -define pcodeop vmovd_avx512f ; -:VMOVD XmmReg1, rm32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x6E; (XmmReg1 & ZmmReg1) ... & rm32 +:VMOVD XmmReg1, rm32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x6E; XmmReg1 ... & rm32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RM) { - local tmp:16 = vmovd_avx512f( rm32 ); - ZmmReg1 = zext(tmp); + XmmReg1 = zext(rm32); } # MOVD/MOVQ 4-55 PAGE 1175 LINE 61368 -define pcodeop vmovq_avx512f ; @ifdef IA64 -:VMOVQ XmmReg1, rm64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x6E; (XmmReg1 & ZmmReg1) ... & rm64 +:VMOVQ XmmReg1, rm64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x6E; XmmReg1 ... & rm64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RM) { - local tmp:16 = vmovq_avx512f( rm64 ); - ZmmReg1 = zext(tmp); + XmmReg1 = zext(rm64); } @endif # MOVD/MOVQ 4-55 PAGE 1175 LINE 61370 -:VMOVD rm32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7E; XmmReg1 ... & rm32 +:VMOVD rm32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7E; XmmReg1 ... & rm32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) { - rm32 = vmovd_avx512f( XmmReg1 ); + rm32 = XmmReg1[0,32]; } # MOVD/MOVQ 4-55 PAGE 1175 LINE 61372 @ifdef IA64 -:VMOVQ rm64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x7E; XmmReg1 ... & rm64 +:VMOVQ rm64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x7E; XmmReg1 ... 
& rm64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) { - rm64 = vmovq_avx512f( XmmReg1 ); + rm64 = XmmReg1[0,64]; } @endif # MOVDDUP 4-59 PAGE 1179 LINE 61526 define pcodeop vmovddup_avx512vl ; -:VMOVDDUP XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x12; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VMOVDDUP XmmReg1 XmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x12; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 13; ] # (TupleType DUP-RM) { - local tmp:16 = vmovddup_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovddup_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MOVDDUP 4-59 PAGE 1179 LINE 61529 -:VMOVDDUP YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x12; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDDUP YmmReg1 YmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x12; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 13; ] # (TupleType DUP-RM) { - local tmp:32 = vmovddup_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovddup_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MOVDDUP 4-59 PAGE 1179 LINE 61532 define pcodeop vmovddup_avx512f ; -:VMOVDDUP ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x12; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDDUP ZmmReg1 ZmmOpMask64, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x12; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 13; ] # (TupleType DUP-RM) { - ZmmReg1 = vmovddup_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovddup_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61675 define pcodeop vmovdqa32_avx512vl ; -:VMOVDQA32 XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVDQA32 XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovdqa32_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovdqa32_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61678 -:VMOVDQA32 YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDQA32 YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovdqa32_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovdqa32_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61681 define pcodeop vmovdqa32_avx512f ; -:VMOVDQA32 ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; ZmmReg1 ... 
& ZmmReg2_m512 +:VMOVDQA32 ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovdqa32_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovdqa32_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61684 -:VMOVDQA32 XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; XmmReg1 ... & XmmReg2_m128 +:VMOVDQA32 XmmReg2_m128 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (XmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { XmmReg2_m128 = vmovdqa32_avx512vl( XmmReg1 ); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61687 -:VMOVDQA32 YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; YmmReg1 ... & YmmReg2_m256 +:VMOVDQA32 YmmReg2_m256 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (YmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { YmmReg2_m256 = vmovdqa32_avx512vl( YmmReg1 ); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61690 -:VMOVDQA32 ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQA32 ZmmReg2_m512 ZmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { ZmmReg2_m512 = vmovdqa32_avx512f( ZmmReg1 ); @@ -1342,45 +1276,52 @@ define pcodeop vmovdqa32_avx512f ; # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61693 define pcodeop vmovdqa64_avx512vl ; -:VMOVDQA64 XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVDQA64 XmmReg1 XmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovdqa64_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovdqa64_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61696 -:VMOVDQA64 YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDQA64 YmmReg1 YmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovdqa64_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovdqa64_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61699 define pcodeop vmovdqa64_avx512f ; -:VMOVDQA64 ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQA64 ZmmReg1 ZmmOpMask64, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovdqa64_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovdqa64_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61702 -:VMOVDQA64 XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; XmmReg1 ... & XmmReg2_m128 +:VMOVDQA64 XmmReg2_m128 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (XmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { XmmReg2_m128 = vmovdqa64_avx512vl( XmmReg1 ); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61705 -:VMOVDQA64 YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; YmmReg1 ... & YmmReg2_m256 +:VMOVDQA64 YmmReg2_m256 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (YmmReg1 & YmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { YmmReg2_m256 = vmovdqa64_avx512vl( YmmReg1 ); } # MOVDQA,VMOVDQA32/64 4-62 PAGE 1182 LINE 61708 -:VMOVDQA64 ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQA64 ZmmReg2_m512 ZmmOpMask64, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { ZmmReg2_m512 = vmovdqa64_avx512f( ZmmReg1 ); @@ -1388,45 +1329,52 @@ define pcodeop vmovdqa64_avx512f ; # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61938 define pcodeop vmovdqu8_avx512vl ; -:VMOVDQU8 XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVDQU8 XmmReg1 XmmOpMask8, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovdqu8_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovdqu8_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61941 -:VMOVDQU8 YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDQU8 YmmReg1 YmmOpMask8, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovdqu8_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovdqu8_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61944 define pcodeop vmovdqu8_avx512bw ; -:VMOVDQU8 ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU8 ZmmReg1 ZmmOpMask8, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovdqu8_avx512bw( ZmmReg2_m512 ); + ZmmResult = vmovdqu8_avx512bw( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61947 -:VMOVDQU8 XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; XmmReg1 ... & XmmReg2_m128 +:VMOVDQU8 XmmReg2_m128 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (XmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { XmmReg2_m128 = vmovdqu8_avx512vl( XmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61950 -:VMOVDQU8 YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; YmmReg1 ... & YmmReg2_m256 +:VMOVDQU8 YmmReg2_m256 YmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (YmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { YmmReg2_m256 = vmovdqu8_avx512vl( YmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61953 -:VMOVDQU8 ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU8 ZmmReg2_m512 ZmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { ZmmReg2_m512 = vmovdqu8_avx512bw( ZmmReg1 ); @@ -1434,45 +1382,52 @@ define pcodeop vmovdqu8_avx512bw ; # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61956 define pcodeop vmovdqu16_avx512vl ; -:VMOVDQU16 XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVDQU16 XmmReg1 XmmOpMask16, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovdqu16_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovdqu16_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61959 -:VMOVDQU16 YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDQU16 YmmReg1 YmmOpMask16, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovdqu16_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovdqu16_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61962 define pcodeop vmovdqu16_avx512bw ; -:VMOVDQU16 ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU16 ZmmReg1 ZmmOpMask16, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovdqu16_avx512bw( ZmmReg2_m512 ); + ZmmResult = vmovdqu16_avx512bw( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61965 -:VMOVDQU16 XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; XmmReg1 ... & XmmReg2_m128 +:VMOVDQU16 XmmReg2_m128 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (XmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { XmmReg2_m128 = vmovdqu16_avx512vl( XmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61968 -:VMOVDQU16 YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; YmmReg1 ... & YmmReg2_m256 +:VMOVDQU16 YmmReg2_m256 YmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (YmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { YmmReg2_m256 = vmovdqu16_avx512vl( YmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61971 -:VMOVDQU16 ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU16 ZmmReg2_m512 ZmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { ZmmReg2_m512 = vmovdqu16_avx512bw( ZmmReg1 ); @@ -1480,45 +1435,52 @@ define pcodeop vmovdqu16_avx512bw ; # MOVDQU,VMOVDQU8/16/32/64 4-67 PAGE 1187 LINE 61974 define pcodeop vmovdqu32_avx512vl ; -:VMOVDQU32 XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVDQU32 XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovdqu32_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovdqu32_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 61987 -:VMOVDQU32 YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDQU32 YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovdqu32_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovdqu32_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 61990 define pcodeop vmovdqu32_avx512f ; -:VMOVDQU32 ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x6F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU32 ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x6F; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovdqu32_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovdqu32_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 61993 -:VMOVDQU32 XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; XmmReg1 ... & XmmReg2_m128 +:VMOVDQU32 XmmReg2_m128 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (XmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { XmmReg2_m128 = vmovdqu32_avx512vl( XmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 61996 -:VMOVDQU32 YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; YmmReg1 ... & YmmReg2_m256 +:VMOVDQU32 YmmReg2_m256 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (YmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { YmmReg2_m256 = vmovdqu32_avx512vl( YmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 61999 -:VMOVDQU32 ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU32 ZmmReg2_m512 ZmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x7F; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { ZmmReg2_m512 = vmovdqu32_avx512f( ZmmReg1 ); @@ -1526,69 +1488,81 @@ define pcodeop vmovdqu32_avx512f ; # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 62002 define pcodeop vmovdqu64_avx512vl ; -:VMOVDQU64 XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVDQU64 XmmReg1 XmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovdqu64_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovdqu64_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 62005 -:VMOVDQU64 YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVDQU64 YmmReg1 YmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovdqu64_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovdqu64_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 62008 define pcodeop vmovdqu64_avx512f ; -:VMOVDQU64 ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x6F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU64 ZmmReg1 ZmmOpMask64, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0x6F; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovdqu64_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovdqu64_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 62011 -:VMOVDQU64 XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; XmmReg1 ... & XmmReg2_m128 +:VMOVDQU64 XmmReg2_m128 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (XmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { XmmReg2_m128 = vmovdqu64_avx512vl( XmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 62014 -:VMOVDQU64 YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; YmmReg1 ... & YmmReg2_m256 +:VMOVDQU64 YmmReg2_m256 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (YmmReg1 & YmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { YmmReg2_m256 = vmovdqu64_avx512vl( YmmReg1 ); } # MOVDQU,VMOVDQU8/16/32/64 4-68 PAGE 1188 LINE 62017 -:VMOVDQU64 ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7F; ZmmReg1 ... & ZmmReg2_m512 +:VMOVDQU64 ZmmReg2_m512 ZmmOpMask64, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x7F; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512
[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR)
{
	ZmmReg2_m512 = vmovdqu64_avx512f( ZmmReg1 );
}

# MOVHLPS 4-76 PAGE 1196 LINE 62412
-define pcodeop vmovhlps_avx512f ;
-:VMOVHLPS XmmReg1, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) & (mod=0x3 & XmmReg2)
+:VMOVHLPS XmmReg1, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) & (mod=0x3 & XmmReg2)
{
-	local tmp:16 = vmovhlps_avx512f( vexVVVV_XmmReg, XmmReg2 );
-	ZmmReg1 = zext(tmp);
+	# MOVHLPS: DEST[63:0] = SRC2[127:64]; DEST[127:64] = SRC1[127:64]
+	local src1 = vexVVVV_XmmReg[64,64];
+	local src2 = XmmReg2[64,64];
+	XmmReg1[0,64] = src2;
+	XmmReg1[64,64] = src1;
+	ZmmReg1 = zext(XmmReg1);
}

# MOVHPD 4-78 PAGE 1198 LINE 62485
define pcodeop vmovhpd_avx512f ;
-:VMOVHPD XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x16; (XmmReg1 & ZmmReg1) ... & m64
+:VMOVHPD XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x16; (XmmReg1 & ZmmReg1) ... & m64
+[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S)
{
-	local tmp:16 = vmovhpd_avx512f( vexVVVV_XmmReg, m64 );
-	ZmmReg1 = zext(tmp);
+	# MOVHPD load form: DEST[63:0] = SRC1[63:0]; DEST[127:64] = m64
+	local src1 = vexVVVV_XmmReg[0,64];
+	local src2 = m64[0,64];
+	XmmReg1[0,64] = src1;
+	XmmReg1[64,64] = src2;
+	ZmmReg1 = zext(XmmReg1);
}

# MOVHPD 4-78 PAGE 1198 LINE 62491
-:VMOVHPD m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x17; XmmReg1 ... & m64
+:VMOVHPD m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x17; XmmReg1 ...
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) { m64 = vmovhpd_avx512f( XmmReg1 ); @@ -1596,15 +1570,16 @@ define pcodeop vmovhpd_avx512f ; # MOVHPS 4-80 PAGE 1200 LINE 62572 define pcodeop vmovhps_avx512f ; -:VMOVHPS XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x16; (XmmReg1 & ZmmReg1) ... & m64 +:VMOVHPS XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x16; (XmmReg1 & ZmmReg1) ... & m64 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2) { - local tmp:16 = vmovhps_avx512f( vexVVVV_XmmReg, m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovhps_avx512f( vexVVVV_XmmReg, m64 ); + XmmMask = XmmReg1; + ZmmReg1 = zext(XmmResult); } # MOVHPS 4-80 PAGE 1200 LINE 62578 -:VMOVHPS m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x17; XmmReg1 ... & m64 +:VMOVHPS m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x17; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2-MR) { m64 = vmovhps_avx512f( XmmReg1 ); @@ -1613,7 +1588,7 @@ define pcodeop vmovhps_avx512f ; # MOVLHPS 4-82 PAGE 1202 LINE 62660 # WARNING: duplicate opcode EVEX.NDS.128.0F.W0 16 /r last seen on 4-80 PAGE 1200 LINE 62572 for "VMOVLHPS xmm1, xmm2, xmm3" define pcodeop vmovlhps_avx512f ; -:VMOVLHPS XmmReg1, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x16; (XmmReg1 & ZmmReg1) & (mod=0x3 & XmmReg2) +:VMOVLHPS XmmReg1, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x16; (XmmReg1 & ZmmReg1) & (mod=0x3 & XmmReg2) { local tmp:16 = vmovlhps_avx512f( vexVVVV_XmmReg, XmmReg2 ); ZmmReg1 = zext(tmp); @@ -1621,7 +1596,7 @@ define pcodeop vmovlhps_avx512f ; # MOVLPD 4-84 PAGE 1204 LINE 62733 define pcodeop vmovlpd_avx512f ; -:VMOVLPD XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) ... & m64 +:VMOVLPD XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vmovlpd_avx512f( vexVVVV_XmmReg, m64 ); @@ -1629,7 +1604,7 @@ define pcodeop vmovlpd_avx512f ; } # MOVLPD 4-84 PAGE 1204 LINE 62739 -:VMOVLPD m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x13; XmmReg1 ... & m64 +:VMOVLPD m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x13; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) { m64 = vmovlpd_avx512f( XmmReg1 ); @@ -1638,7 +1613,7 @@ define pcodeop vmovlpd_avx512f ; # MOVLPS 4-86 PAGE 1206 LINE 62818 # WARNING: duplicate opcode EVEX.NDS.128.0F.W0 12 /r last seen on 4-76 PAGE 1196 LINE 62412 for "VMOVLPS xmm2, xmm1, m64" define pcodeop vmovlps_avx512f ; -:VMOVLPS XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) ... & m64 +:VMOVLPS XmmReg1, vexVVVV_XmmReg, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) ... & m64 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2) { local tmp:16 = vmovlps_avx512f( vexVVVV_XmmReg, m64 ); @@ -1646,7 +1621,7 @@ define pcodeop vmovlps_avx512f ; } # MOVLPS 4-86 PAGE 1206 LINE 62824 -:VMOVLPS m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x13; XmmReg1 ... & m64 +:VMOVLPS m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x13; XmmReg1 ... & m64 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2-MR) { m64 = vmovlps_avx512f( XmmReg1 ); @@ -1654,7 +1629,7 @@ define pcodeop vmovlps_avx512f ; # MOVNTDQA 4-92 PAGE 1212 LINE 63088 define pcodeop vmovntdqa_avx512vl ; -:VMOVNTDQA XmmReg1, m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x2A; (XmmReg1 & ZmmReg1) ... & m128 +:VMOVNTDQA XmmReg1, m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x2A; (XmmReg1 & ZmmReg1) ... & m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { local tmp:16 = vmovntdqa_avx512vl( m128 ); @@ -1662,7 +1637,7 @@ define pcodeop vmovntdqa_avx512vl ; } # MOVNTDQA 4-92 PAGE 1212 LINE 63090 -:VMOVNTDQA YmmReg1, m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x2A; (YmmReg1 & ZmmReg1) ... 
& m256 +:VMOVNTDQA YmmReg1, m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x2A; (YmmReg1 & ZmmReg1) ... & m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { local tmp:32 = vmovntdqa_avx512vl( m256 ); @@ -1671,22 +1646,23 @@ define pcodeop vmovntdqa_avx512vl ; # MOVNTDQA 4-92 PAGE 1212 LINE 63092 define pcodeop vmovntdqa_avx512f ; -:VMOVNTDQA ZmmReg1, m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x2A; ZmmReg1 ... & m512 +:VMOVNTDQA ZmmReg1, m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x2A; ZmmReg1 ... & m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { ZmmReg1 = vmovntdqa_avx512f( m512 ); + } # MOVNTDQ 4-94 PAGE 1214 LINE 63191 define pcodeop vmovntdq_avx512vl ; -:VMOVNTDQ m128, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0xE7; XmmReg1 ... & m128 +:VMOVNTDQ m128, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0xE7; XmmReg1 ... & m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m128 = vmovntdq_avx512vl( XmmReg1 ); } # MOVNTDQ 4-94 PAGE 1214 LINE 63193 -:VMOVNTDQ m256, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0xE7; YmmReg1 ... & m256 +:VMOVNTDQ m256, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0xE7; YmmReg1 ... & m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m256 = vmovntdq_avx512vl( YmmReg1 ); @@ -1694,7 +1670,7 @@ define pcodeop vmovntdq_avx512vl ; # MOVNTDQ 4-94 PAGE 1214 LINE 63195 define pcodeop vmovntdq_avx512f ; -:VMOVNTDQ m512, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0xE7; ZmmReg1 ... & m512 +:VMOVNTDQ m512, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0xE7; ZmmReg1 ... 
& m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m512 = vmovntdq_avx512f( ZmmReg1 ); @@ -1702,14 +1678,14 @@ define pcodeop vmovntdq_avx512f ; # MOVNTPD 4-98 PAGE 1218 LINE 63361 define pcodeop vmovntpd_avx512vl ; -:VMOVNTPD m128, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2B; XmmReg1 ... & m128 +:VMOVNTPD m128, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2B; XmmReg1 ... & m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m128 = vmovntpd_avx512vl( XmmReg1 ); } # MOVNTPD 4-98 PAGE 1218 LINE 63363 -:VMOVNTPD m256, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2B; YmmReg1 ... & m256 +:VMOVNTPD m256, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2B; YmmReg1 ... & m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m256 = vmovntpd_avx512vl( YmmReg1 ); @@ -1717,7 +1693,7 @@ define pcodeop vmovntpd_avx512vl ; # MOVNTPD 4-98 PAGE 1218 LINE 63365 define pcodeop vmovntpd_avx512f ; -:VMOVNTPD m512, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2B; ZmmReg1 ... & m512 +:VMOVNTPD m512, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2B; ZmmReg1 ... & m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m512 = vmovntpd_avx512f( ZmmReg1 ); @@ -1725,14 +1701,14 @@ define pcodeop vmovntpd_avx512f ; # MOVNTPS 4-100 PAGE 1220 LINE 63445 define pcodeop vmovntps_avx512vl ; -:VMOVNTPS m128, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2B; XmmReg1 ... & m128 +:VMOVNTPS m128, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2B; XmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m128 = vmovntps_avx512vl( XmmReg1 ); } # MOVNTPS 4-100 PAGE 1220 LINE 63447 -:VMOVNTPS m256, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2B; YmmReg1 ... & m256 +:VMOVNTPS m256, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2B; YmmReg1 ... & m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m256 = vmovntps_avx512vl( YmmReg1 ); @@ -1740,1148 +1716,1514 @@ define pcodeop vmovntps_avx512vl ; # MOVNTPS 4-100 PAGE 1220 LINE 63449 define pcodeop vmovntps_avx512f ; -:VMOVNTPS m512, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2B; ZmmReg1 ... & m512 +:VMOVNTPS m512, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2B; ZmmReg1 ... & m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { m512 = vmovntps_avx512f( ZmmReg1 ); } # MOVQ 4-103 PAGE 1223 LINE 63581 -:VMOVQ XmmReg1, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x7E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VMOVQ XmmReg1, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x7E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RM) { - local tmp:16 = vmovq_avx512f( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + ZmmReg1 = zext(XmmReg2_m64[0,64]); } # MOVQ 4-103 PAGE 1223 LINE 63587 -:VMOVQ XmmReg2_m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0xD6; XmmReg1 ... 
& XmmReg2_m64 +:VMOVQ XmmReg2, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0xD6; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) { - XmmReg2_m64 = vmovq_avx512f( XmmReg1 ); + ZmmReg2 = zext( XmmReg1[0,64] ); +} + +:VMOVQ m64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0xD6; XmmReg1 ... & m64 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) +{ + m64 = XmmReg1[0,64]; } # MOVSD 4-111 PAGE 1231 LINE 63978 define pcodeop vmovsd_avx512f ; -:VMOVSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x10; (XmmReg1 & ZmmReg1) & (mod=0x3 & XmmReg2) +:VMOVSD XmmReg1 XmmOpMask, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask) & (mod=0x3 & XmmReg2) { - local tmp:16 = vmovsd_avx512f( vexVVVV_XmmReg, XmmReg2 ); - ZmmReg1 = zext(tmp); + XmmResult = XmmReg2; + XmmMask = XmmReg1; + build XmmOpMask; + XmmResult[0,64] = (zext(XmmOpMask[0,1]) * XmmResult[0,64]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,64]); + XmmResult[64,64] = vexVVVV_XmmReg[64,64]; + ZmmReg1 = zext(XmmResult); } # MOVSD 4-111 PAGE 1231 LINE 63981 -:VMOVSD XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x10; (XmmReg1 & ZmmReg1) ... & m64 +:VMOVSD XmmReg1 XmmOpMask, m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask) ... 
& m64
[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RM)
{
-	local tmp:16 = vmovsd_avx512f( XmmReg1, m64 );
+	# merging-masked load: k[0]==1 -> load m64; k[0]==0 -> keep old DEST[63:0]
+	local tmp:8 = m64;
+	XmmMask = XmmReg1;
+	build XmmOpMask;
+	tmp = (zext(XmmOpMask[0,1]) * tmp) + (zext(!XmmOpMask[0,1]) * XmmMask[0,64]);
	ZmmReg1 = zext(tmp);
}

# MOVSD 4-111 PAGE 1231 LINE 63983
-:VMOVSD XmmReg2^KWriteMask, vexVVVV_XmmReg, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x11; XmmReg1 & (mod=0x3 & (XmmReg2 & ZmmReg2))
+:VMOVSD XmmReg2 XmmOpMask, vexVVVV_XmmReg, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x11; XmmReg1 & XmmOpMask & (mod=0x3 & (XmmReg2 & ZmmReg2))
{
-	local tmp:16 = vmovsd_avx512f( vexVVVV_XmmReg, XmmReg1 );
-	ZmmReg2 = zext(tmp);
+	# 0x11 form: ModRM.r/m (XmmReg2/ZmmReg2) is the destination, ModRM.reg (XmmReg1) the source
+	XmmResult = XmmReg1;
+	XmmMask = XmmReg2;
+	build XmmOpMask;
+	XmmResult[0,64] = (zext(XmmOpMask[0,1]) * XmmResult[0,64]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,64]);
+	XmmResult[64,64] = vexVVVV_XmmReg[64,64];
+	ZmmReg2 = zext(XmmResult);
}

# MOVSD 4-111 PAGE 1231 LINE 63986
-:VMOVSD m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x11; XmmReg1 ... & m64
+:VMOVSD m64 XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & XmmOpMask; byte=0x11; XmmReg1 ... & m64
[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR)
{
-	m64 = vmovsd_avx512f( XmmReg1 );
+	# masked store: k[0]==1 -> write XmmReg1[63:0]; k[0]==0 -> memory unchanged
+	local tmp:8 = m64;
+	XmmMask = XmmReg1;
+	build XmmOpMask;
+	tmp = (zext(XmmOpMask[0,1]) * XmmMask[0,64]) + (zext(!XmmOpMask[0,1]) * tmp);
+	m64 = tmp;
}

# MOVSHDUP 4-114 PAGE 1234 LINE 64130
define pcodeop vmovshdup_avx512vl ;
-:VMOVSHDUP XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x16; (XmmReg1 & ZmmReg1) ...
& XmmReg2_m128 +:VMOVSHDUP XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x16; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vmovshdup_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovshdup_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVSHDUP 4-114 PAGE 1234 LINE 64133 -:VMOVSHDUP YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x16; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVSHDUP YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x16; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vmovshdup_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovshdup_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MOVSHDUP 4-114 PAGE 1234 LINE 64136 define pcodeop vmovshdup_avx512f ; -:VMOVSHDUP ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x16; ZmmReg1 ... & ZmmReg2_m512 +:VMOVSHDUP ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x16; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vmovshdup_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovshdup_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MOVSLDUP 4-117 PAGE 1237 LINE 64284 define pcodeop vmovsldup_avx512vl ; -:VMOVSLDUP XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x12; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128 +:VMOVSLDUP XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x12; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vmovsldup_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vmovsldup_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVSLDUP 4-117 PAGE 1237 LINE 64287 -:VMOVSLDUP YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x12; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVSLDUP YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x12; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vmovsldup_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vmovsldup_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MOVSLDUP 4-117 PAGE 1237 LINE 64290 define pcodeop vmovsldup_avx512f ; -:VMOVSLDUP ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x12; ZmmReg1 ... & ZmmReg2_m512 +:VMOVSLDUP ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x12; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vmovsldup_avx512f( ZmmReg2_m512 ); + ZmmResult = vmovsldup_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MOVSS 4-120 PAGE 1240 LINE 64443 -define pcodeop vmovss_avx512f ; -:VMOVSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x10; (XmmReg1 & ZmmReg1) & (mod=0x3 & XmmReg2) +:VMOVSS XmmReg1 XmmOpMask, vexVVVV_XmmReg, XmmReg2 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask) & (mod=0x3 & XmmReg2) { - local tmp:16 = vmovss_avx512f( vexVVVV_XmmReg, XmmReg2 ); - ZmmReg1 = zext(tmp); + local tmp:4 = XmmReg2[0,32]; + XmmMask = XmmReg1; + build XmmOpMask; + XmmResult = vexVVVV_XmmReg; + XmmResult[0,32] = (zext(XmmOpMask[0,1]) * tmp) + (zext(!XmmOpMask[0,1]) * XmmMask[0,32]); + ZmmReg1 = zext(XmmResult); } # MOVSS 4-120 PAGE 1240 LINE 64446 -:VMOVSS XmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x10; (XmmReg1 & ZmmReg1) ... & m32 +:VMOVSS XmmReg1 XmmOpMask, m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask) ... 
& m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RM) { - local tmp:16 = vmovss_avx512f( XmmReg1, m32 ); + local tmp:4 = m32; + XmmMask = XmmReg1; + build XmmOpMask; + tmp = (zext(XmmOpMask[0,1]) * tmp) + (zext(!XmmOpMask[0,1]) * XmmMask[0,32]); ZmmReg1 = zext(tmp); } # MOVSS 4-120 PAGE 1240 LINE 64448 -:VMOVSS XmmReg2^KWriteMask, vexVVVV_XmmReg, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x11; XmmReg1 & (mod=0x3 & (XmmReg2 & ZmmReg2)) +:VMOVSS XmmReg2 XmmOpMask, vexVVVV_XmmReg, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & XmmOpMask & vexVVVV_XmmReg; byte=0x11; XmmReg1 & (mod=0x3 & (XmmReg2 & ZmmReg2)) { - local tmp:16 = vmovss_avx512f( vexVVVV_XmmReg, XmmReg1 ); - ZmmReg2 = zext(tmp); + local tmp:4 = XmmReg1[0,32]; + XmmMask = XmmReg2; + build XmmOpMask; + XmmResult = vexVVVV_XmmReg; + XmmResult[0,32] = (zext(XmmOpMask[0,1]) * tmp) + (zext(!XmmOpMask[0,1]) * XmmMask[0,32]); + ZmmReg2 = zext(XmmResult); } # MOVSS 4-120 PAGE 1240 LINE 64451 -:VMOVSS m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x11; XmmReg1 ... & m32 +:VMOVSS m32 XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & XmmOpMask; byte=0x11; XmmReg1 ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-MR) { - m32 = vmovss_avx512f( XmmReg1 ); + local tmp:4 = XmmReg1[0,32]; + XmmMask = zext(m32); + build XmmOpMask; + m32 = (zext(XmmOpMask[0,1]) * tmp) + (zext(!XmmOpMask[0,1]) * XmmMask[0,32]); } # MOVUPD 4-126 PAGE 1246 LINE 64695 -define pcodeop vmovupd_avx512vl ; -:VMOVUPD XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x10; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128 +:VMOVUPD XmmReg1 XmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovupd_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = XmmReg2_m128 ; + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MOVUPD 4-126 PAGE 1246 LINE 64698 -:VMOVUPD XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x11; XmmReg1 ... & XmmReg2_m128 +:VMOVUPD XmmReg2 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & XmmOpMask64; byte=0x11; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - XmmReg2_m128 = vmovupd_avx512vl( XmmReg1 ); + XmmResult = XmmReg1; + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); +} + +:VMOVUPD m128 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & XmmOpMask64; byte=0x11; (XmmReg1) ... & m128 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + XmmResult = XmmReg1; + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; } # MOVUPD 4-126 PAGE 1246 LINE 64701 -:VMOVUPD YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x10; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVUPD YmmReg1 YmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x10; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovupd_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = YmmReg2_m256; + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MOVUPD 4-126 PAGE 1246 LINE 64704 -:VMOVUPD YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x11; YmmReg1 ... & YmmReg2_m256 +:VMOVUPD YmmReg2 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & YmmOpMask64; byte=0x11; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - YmmReg2_m256 = vmovupd_avx512vl( YmmReg1 ); + YmmResult = YmmReg1; + YmmMask = YmmReg2; + build YmmOpMask64; + ZmmReg2 = zext(YmmResult); +} + +:VMOVUPD m256 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & YmmOpMask64; byte=0x11; YmmReg1 ... & m256 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + YmmResult = YmmReg1; + YmmMask = m256; + build YmmOpMask64; + m256 = YmmResult; } # MOVUPD 4-126 PAGE 1246 LINE 64707 -define pcodeop vmovupd_avx512f ; -:VMOVUPD ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x10; ZmmReg1 ... & ZmmReg2_m512 +:VMOVUPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x10; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovupd_avx512f( ZmmReg2_m512 ); + ZmmResult = ZmmReg2_m512; + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MOVUPD 4-126 PAGE 1246 LINE 64710 -:VMOVUPD ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x11; ZmmReg1 ... 
& ZmmReg2_m512 +:VMOVUPD ZmmReg2_m512 ZmmOpMask64, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & ZmmOpMask64; byte=0x11; ZmmReg1 ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - ZmmReg2_m512 = vmovupd_avx512f( ZmmReg1 ); + ZmmResult = ZmmReg1; + ZmmMask = ZmmReg2_m512; + build ZmmOpMask64; + ZmmReg2_m512 = ZmmResult; } # MOVUPS 4-130 PAGE 1250 LINE 64880 -define pcodeop vmovups_avx512vl ; -:VMOVUPS XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x10; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VMOVUPS XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:16 = vmovups_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = XmmReg2_m128; + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MOVUPS 4-130 PAGE 1250 LINE 64883 -:VMOVUPS YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x10; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VMOVUPS YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x10; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - local tmp:32 = vmovups_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = YmmReg2_m256; + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MOVUPS 4-130 PAGE 1250 LINE 64886 -define pcodeop vmovups_avx512f ; -:VMOVUPS ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x10; ZmmReg1 ... 
& ZmmReg2_m512 +:VMOVUPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x10; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-RM) { - ZmmReg1 = vmovups_avx512f( ZmmReg2_m512 ); + ZmmResult = ZmmReg2_m512; + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MOVUPS 4-130 PAGE 1250 LINE 64889 -:VMOVUPS XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x11; XmmReg1 ... & XmmReg2_m128 +:VMOVUPS XmmReg2 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & XmmOpMask32; byte=0x11; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - XmmReg2_m128 = vmovups_avx512vl( XmmReg1 ); + XmmResult = XmmReg1; + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); +} + +:VMOVUPS m128 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & XmmOpMask32; byte=0x11; XmmReg1 ... & m128 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + XmmResult = XmmReg1; + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # MOVUPS 4-130 PAGE 1250 LINE 64892 -:VMOVUPS YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x11; YmmReg1 ... & YmmReg2_m256 +:VMOVUPS YmmReg2 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & YmmOpMask32; byte=0x11; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - YmmReg2_m256 = vmovups_avx512vl( YmmReg1 ); + YmmResult = YmmReg1; + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); +} + +:VMOVUPS m256 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & YmmOpMask32; byte=0x11; YmmReg1 ... 
& m256 +[ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) +{ + YmmResult = YmmReg1; + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # MOVUPS 4-130 PAGE 1250 LINE 64895 -:VMOVUPS ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x11; ZmmReg1 ... & ZmmReg2_m512 +:VMOVUPS ZmmReg2_m512 ZmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & ZmmOpMask32; byte=0x11; ZmmReg1 ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM-MR) { - ZmmReg2_m512 = vmovups_avx512f( ZmmReg1 ); + ZmmResult = ZmmReg1; + ZmmMask = ZmmReg2_m512; + build ZmmOpMask32; + ZmmReg2_m512 = ZmmResult; } # MULPD 4-146 PAGE 1266 LINE 65686 define pcodeop vmulpd_avx512vl ; -:VMULPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VMULPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vmulpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vmulpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MULPD 4-146 PAGE 1266 LINE 65689 -:VMULPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x59; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst +:VMULPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x59; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vmulpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vmulpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # MULPD 4-146 PAGE 1266 LINE 65692 define pcodeop vmulpd_avx512f ; -:VMULPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x59; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VMULPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x59; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vmulpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vmulpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # MULPS 4-149 PAGE 1269 LINE 65817 define pcodeop vmulps_avx512vl ; -:VMULPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VMULPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vmulps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vmulps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # MULPS 4-149 PAGE 1269 LINE 65820 -:VMULPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x59; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VMULPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x59; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vmulps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vmulps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # MULPS 4-149 PAGE 1269 LINE 65823 define pcodeop vmulps_avx512f ; -:VMULPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x59; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VMULPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x59; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vmulps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vmulps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # MULSD 4-152 PAGE 1272 LINE 65959 define pcodeop vmulsd_avx512f ; -:VMULSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VMULSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vmulsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vmulsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # MULSS 4-154 PAGE 1274 LINE 66055 define pcodeop vmulss_avx512f ; -:VMULSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VMULSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x59; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vmulss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vmulss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # ORPD 4-168 PAGE 1288 LINE 66724 define pcodeop vorpd_avx512vl ; -:VORPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x56; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VORPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x56; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vorpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vorpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # ORPD 4-168 PAGE 1288 LINE 66727 -:VORPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x56; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VORPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x56; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vorpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vorpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # ORPD 4-168 PAGE 1288 LINE 66730 define pcodeop vorpd_avx512dq ; -:VORPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x56; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VORPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x56; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vorpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vorpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # ORPS 4-171 PAGE 1291 LINE 66850 define pcodeop vorps_avx512vl ; -:VORPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x56; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VORPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x56; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vorps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vorps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # ORPS 4-171 PAGE 1291 LINE 66853 -:VORPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x56; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VORPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x56; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vorps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vorps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # ORPS 4-171 PAGE 1291 LINE 66856 define pcodeop vorps_avx512dq ; -:VORPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x56; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VORPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x56; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vorps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vorps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PABSB/PABSW/PABSD/PABSQ 4-180 PAGE 1300 LINE 67320 define pcodeop vpabsb_avx512vl ; -:VPABSB XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x1C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPABSB XmmReg1 XmmOpMask8, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x1C; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpabsb_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpabsb_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-180 PAGE 1300 LINE 67323 -:VPABSB YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x1C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPABSB YmmReg1 YmmOpMask8, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x1C; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpabsb_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpabsb_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-180 PAGE 1300 LINE 67326 define pcodeop vpabsb_avx512bw ; -:VPABSB ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x1C; ZmmReg1 ... 
& ZmmReg2_m512 +:VPABSB ZmmReg1 ZmmOpMask8, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x1C; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpabsb_avx512bw( ZmmReg2_m512 ); + ZmmResult = vpabsb_avx512bw( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PABSB/PABSW/PABSD/PABSQ 4-180 PAGE 1300 LINE 67329 define pcodeop vpabsw_avx512vl ; -:VPABSW XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x1D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPABSW XmmReg1 XmmOpMask16, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x1D; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpabsw_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpabsw_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67344 -:VPABSW YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x1D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPABSW YmmReg1 YmmOpMask16, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x1D; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpabsw_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpabsw_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67347 define pcodeop vpabsw_avx512bw ; -:VPABSW ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x1D; ZmmReg1 ... 
& ZmmReg2_m512 +:VPABSW ZmmReg1 ZmmOpMask16, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x1D; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpabsw_avx512bw( ZmmReg2_m512 ); + ZmmResult = vpabsw_avx512bw( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67350 define pcodeop vpabsd_avx512vl ; -:VPABSD XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x1E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPABSD XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1E; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpabsd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpabsd_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67353 -:VPABSD YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x1E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPABSD YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1E; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpabsd_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpabsd_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67357 define pcodeop vpabsd_avx512f ; -:VPABSD ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x1E; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPABSD ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1E; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpabsd_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vpabsd_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67360 define pcodeop vpabsq_avx512vl ; -:VPABSQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x1F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPABSQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x1F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpabsq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpabsq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67363 -:VPABSQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x1F; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst +:VPABSQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x1F; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpabsq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpabsq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PABSB/PABSW/PABSD/PABSQ 4-181 PAGE 1301 LINE 67366 define pcodeop vpabsq_avx512f ; -:VPABSQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x1F; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPABSQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x1F; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpabsq_avx512f( ZmmReg2_m512_m64bcst ); + ZmmResult = vpabsq_avx512f( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PACKSSWB/PACKSSDW 4-186 PAGE 1306 LINE 67645 define pcodeop vpacksswb_avx512vl ; -:VPACKSSWB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x63; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPACKSSWB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x63; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpacksswb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpacksswb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PACKSSWB/PACKSSDW 4-186 PAGE 1306 LINE 67649 -:VPACKSSWB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x63; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPACKSSWB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x63; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpacksswb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpacksswb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PACKSSWB/PACKSSDW 4-186 PAGE 1306 LINE 67653 define pcodeop vpacksswb_avx512bw ; -:VPACKSSWB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x63; ZmmReg1 ... & ZmmReg2_m512 +:VPACKSSWB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x63; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpacksswb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpacksswb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PACKSSWB/PACKSSDW 4-186 PAGE 1306 LINE 67657 define pcodeop vpackssdw_avx512vl ; -:VPACKSSDW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x6B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPACKSSDW XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x6B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpackssdw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpackssdw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PACKSSWB/PACKSSDW 4-187 PAGE 1307 LINE 67674 -:VPACKSSDW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x6B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPACKSSDW YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x6B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpackssdw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpackssdw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PACKSSWB/PACKSSDW 4-187 PAGE 1307 LINE 67678 define pcodeop vpackssdw_avx512bw ; -:VPACKSSDW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x6B; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPACKSSDW ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x6B; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpackssdw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpackssdw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PACKUSDW 4-194 PAGE 1314 LINE 68094 define pcodeop vpackusdw_avx512vl ; -:VPACKUSDW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x2B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPACKUSDW XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpackusdw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpackusdw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PACKUSDW 4-194 PAGE 1314 LINE 68098 -:VPACKUSDW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x2B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPACKUSDW YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x2B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpackusdw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpackusdw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PACKUSDW 4-194 PAGE 1314 LINE 68103 define pcodeop vpackusdw_avx512bw ; -:VPACKUSDW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x2B; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPACKUSDW ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x2B; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpackusdw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpackusdw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PACKUSWB 4-199 PAGE 1319 LINE 68374 define pcodeop vpackuswb_avx512vl ; -:VPACKUSWB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x67; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPACKUSWB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x67; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpackuswb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpackuswb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PACKUSWB 4-199 PAGE 1319 LINE 68378 -:VPACKUSWB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x67; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPACKUSWB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x67; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpackuswb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpackuswb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PACKUSWB 4-199 PAGE 1319 LINE 68382 define pcodeop vpackuswb_avx512bw ; -:VPACKUSWB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x67; ZmmReg1 ... & ZmmReg2_m512 +:VPACKUSWB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x67; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpackuswb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpackuswb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68674 define pcodeop vpaddb_avx512vl ; -:VPADDB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xFC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPADDB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xFC; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpaddb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68677 define pcodeop vpaddw_avx512vl ; -:VPADDW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xFD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPADDW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xFD; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpaddw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68680 define pcodeop vpaddd_avx512vl ; -:VPADDD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xFE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPADDD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xFE; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpaddd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68683 define pcodeop vpaddq_avx512vl ; -:VPADDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xD4; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPADDQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xD4; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpaddq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68686 -:VPADDB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xFC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPADDB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xFC; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpaddb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68689 -:VPADDW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xFD; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPADDW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xFD; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpaddw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-204 PAGE 1324 LINE 68692 -:VPADDD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xFE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPADDD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xFE; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpaddd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-205 PAGE 1325 LINE 68707 -:VPADDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xD4; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPADDQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xD4; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpaddq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PADDB/PADDW/PADDD/PADDQ 4-205 PAGE 1325 LINE 68710 define pcodeop vpaddb_avx512bw ; -:VPADDB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xFC; ZmmReg1 ... & ZmmReg2_m512 +:VPADDB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xFC; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpaddb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpaddb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PADDB/PADDW/PADDD/PADDQ 4-205 PAGE 1325 LINE 68713 define pcodeop vpaddw_avx512bw ; -:VPADDW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xFD; ZmmReg1 ... & ZmmReg2_m512 +:VPADDW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xFD; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpaddw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpaddw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PADDB/PADDW/PADDD/PADDQ 4-205 PAGE 1325 LINE 68716 define pcodeop vpaddd_avx512f ; -:VPADDD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xFE; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPADDD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xFE; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpaddd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpaddd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PADDB/PADDW/PADDD/PADDQ 4-205 PAGE 1325 LINE 68719 define pcodeop vpaddq_avx512f ; -:VPADDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xD4; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPADDQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xD4; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpaddq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpaddq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PADDSB/PADDSW 4-211 PAGE 1331 LINE 69051 define pcodeop vpaddsb_avx512vl ; -:VPADDSB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xEC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPADDSB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xEC; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpaddsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PADDSB/PADDSW 4-211 PAGE 1331 LINE 69054 -:VPADDSB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xEC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPADDSB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xEC; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpaddsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PADDSB/PADDSW 4-211 PAGE 1331 LINE 69057 define pcodeop vpaddsb_avx512bw ; -:VPADDSB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xEC; ZmmReg1 ... & ZmmReg2_m512 +:VPADDSB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xEC; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpaddsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpaddsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PADDSB/PADDSW 4-211 PAGE 1331 LINE 69060 define pcodeop vpaddsw_avx512vl ; -:VPADDSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xED; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPADDSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xED; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpaddsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PADDSB/PADDSW 4-211 PAGE 1331 LINE 69063 -:VPADDSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xED; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPADDSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xED; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpaddsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PADDSB/PADDSW 4-211 PAGE 1331 LINE 69066 define pcodeop vpaddsw_avx512bw ; -:VPADDSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xED; ZmmReg1 ... & ZmmReg2_m512 +:VPADDSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xED; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpaddsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpaddsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PADDUSB/PADDUSW 4-215 PAGE 1335 LINE 69269 define pcodeop vpaddusb_avx512vl ; -:VPADDUSB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xDC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPADDUSB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xDC; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpaddusb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddusb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PADDUSB/PADDUSW 4-215 PAGE 1335 LINE 69273 -:VPADDUSB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xDC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPADDUSB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xDC; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpaddusb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddusb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PADDUSB/PADDUSW 4-215 PAGE 1335 LINE 69277 define pcodeop vpaddusb_avx512bw ; -:VPADDUSB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xDC; ZmmReg1 ... & ZmmReg2_m512 +:VPADDUSB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xDC; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpaddusb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpaddusb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PADDUSB/PADDUSW 4-215 PAGE 1335 LINE 69281 define pcodeop vpaddusw_avx512vl ; -:VPADDUSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xDD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPADDUSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xDD; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpaddusw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpaddusw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PADDUSB/PADDUSW 4-215 PAGE 1335 LINE 69285 -:VPADDUSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xDD; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPADDUSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xDD; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpaddusw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpaddusw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PADDUSB/PADDUSW 4-216 PAGE 1336 LINE 69302 define pcodeop vpaddusw_avx512bw ; -:VPADDUSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xDD; ZmmReg1 ... & ZmmReg2_m512 +:VPADDUSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xDD; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpaddusw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpaddusw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PALIGNR 4-219 PAGE 1339 LINE 69495 define pcodeop vpalignr_avx512vl ; -:VPALIGNR XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x0F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPALIGNR XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x0F; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpalignr_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpalignr_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PALIGNR 4-219 PAGE 1339 LINE 69499 -:VPALIGNR YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x0F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPALIGNR YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x0F; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpalignr_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpalignr_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PALIGNR 4-219 PAGE 1339 LINE 69505 define pcodeop vpalignr_avx512bw ; -:VPALIGNR ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x0F; ZmmReg1 ... & ZmmReg2_m512 +:VPALIGNR ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x0F; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpalignr_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpalignr_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PAND 4-223 PAGE 1343 LINE 69684 define pcodeop vpandd_avx512vl ; -:VPANDD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xDB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPANDD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xDB; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpandd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpandd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PAND 4-223 PAGE 1343 LINE 69687 -:VPANDD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xDB; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPANDD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xDB; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpandd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpandd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PAND 4-223 PAGE 1343 LINE 69690 define pcodeop vpandd_avx512f ; -:VPANDD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xDB; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPANDD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xDB; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpandd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpandd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PAND 4-223 PAGE 1343 LINE 69693 define pcodeop vpandq_avx512vl ; -:VPANDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xDB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPANDQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xDB; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpandq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpandq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PAND 4-223 PAGE 1343 LINE 69696 -:VPANDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xDB; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPANDQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xDB; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpandq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpandq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PAND 4-223 PAGE 1343 LINE 69699 define pcodeop vpandq_avx512f ; -:VPANDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xDB; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPANDQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xDB; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpandq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpandq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PANDN 4-226 PAGE 1346 LINE 69859 define pcodeop vpandnd_avx512vl ; -:VPANDND XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xDF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPANDND XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xDF; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpandnd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpandnd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PANDN 4-226 PAGE 1346 LINE 69862 -:VPANDND YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xDF; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPANDND YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xDF; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpandnd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpandnd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PANDN 4-226 PAGE 1346 LINE 69865 define pcodeop vpandnd_avx512f ; -:VPANDND ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xDF; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPANDND ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xDF; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpandnd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpandnd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PANDN 4-226 PAGE 1346 LINE 69868 define pcodeop vpandnq_avx512vl ; -:VPANDNQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xDF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPANDNQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xDF; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpandnq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpandnq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PANDN 4-226 PAGE 1346 LINE 69871 -:VPANDNQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xDF; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPANDNQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xDF; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpandnq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpandnq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PANDN 4-226 PAGE 1346 LINE 69874 define pcodeop vpandnq_avx512f ; -:VPANDNQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xDF; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPANDNQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xDF; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpandnq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpandnq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PAVGB/PAVGW 4-230 PAGE 1350 LINE 70097 define pcodeop vpavgb_avx512vl ; -:VPAVGB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE0; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPAVGB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE0; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpavgb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpavgb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PAVGB/PAVGW 4-230 PAGE 1350 LINE 70100 -:VPAVGB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE0; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPAVGB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE0; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpavgb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpavgb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PAVGB/PAVGW 4-230 PAGE 1350 LINE 70103 define pcodeop vpavgb_avx512bw ; -:VPAVGB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE0; ZmmReg1 ... & ZmmReg2_m512 +:VPAVGB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE0; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpavgb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpavgb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PAVGB/PAVGW 4-230 PAGE 1350 LINE 70106 define pcodeop vpavgw_avx512vl ; -:VPAVGW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE3; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPAVGW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE3; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpavgw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpavgw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PAVGB/PAVGW 4-230 PAGE 1350 LINE 70109 -:VPAVGW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE3; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPAVGW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE3; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpavgw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpavgw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PAVGB/PAVGW 4-230 PAGE 1350 LINE 70112 define pcodeop vpavgw_avx512bw ; -:VPAVGW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE3; ZmmReg1 ... & ZmmReg2_m512 +:VPAVGW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE3; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpavgw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpavgw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PCMPEQB/PCMPEQW/PCMPEQD 4-244 PAGE 1364 LINE 70841 define pcodeop vpcmpeqd_avx512vl ; -:VPCMPEQD KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x76; KReg_reg ... & XmmReg2_m128_m32bcst +:VPCMPEQD KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x76; KReg_reg ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpeqd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + local tmp = vpcmpeqd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; + } # PCMPEQB/PCMPEQW/PCMPEQD 4-244 PAGE 1364 LINE 70846 -:VPCMPEQD KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x76; KReg_reg ... & YmmReg2_m256_m32bcst +:VPCMPEQD KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x76; KReg_reg ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpeqd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + local tmp = vpcmpeqd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-244 PAGE 1364 LINE 70851 define pcodeop vpcmpeqd_avx512f ; -:VPCMPEQD KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x76; KReg_reg ... & ZmmReg2_m512_m32bcst +:VPCMPEQD KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x76; KReg_reg ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpeqd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + local tmp = vpcmpeqd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-244 PAGE 1364 LINE 70855 define pcodeop vpcmpeqb_avx512vl ; -:VPCMPEQB KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x74; KReg_reg ... & XmmReg2_m128 +:VPCMPEQB KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_XmmReg; byte=0x74; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpeqb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpeqb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-245 PAGE 1365 LINE 70873 -:VPCMPEQB KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x74; KReg_reg ... 
& YmmReg2_m256 +:VPCMPEQB KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_YmmReg; byte=0x74; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpeqb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpeqb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-245 PAGE 1365 LINE 70878 define pcodeop vpcmpeqb_avx512bw ; -:VPCMPEQB KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x74; KReg_reg ... & ZmmReg2_m512 +:VPCMPEQB KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & evexV5_ZmmReg; byte=0x74; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpeqb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpeqb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,64]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-245 PAGE 1365 LINE 70883 define pcodeop vpcmpeqw_avx512vl ; -:VPCMPEQW KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x75; KReg_reg ... & XmmReg2_m128 +:VPCMPEQW KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_XmmReg; byte=0x75; KReg_reg ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpeqw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpeqw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-245 PAGE 1365 LINE 70888 -:VPCMPEQW KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x75; KReg_reg ... & YmmReg2_m256 +:VPCMPEQW KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_YmmReg; byte=0x75; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpeqw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpeqw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # PCMPEQB/PCMPEQW/PCMPEQD 4-245 PAGE 1365 LINE 70893 define pcodeop vpcmpeqw_avx512bw ; -:VPCMPEQW KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x75; KReg_reg ... & ZmmReg2_m512 +:VPCMPEQW KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & evexV5_ZmmReg; byte=0x75; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpeqw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpeqw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # PCMPEQQ 4-250 PAGE 1370 LINE 71174 define pcodeop vpcmpeqq_avx512vl ; -:VPCMPEQQ KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x29; KReg_reg ... 
& XmmReg2_m128_m64bcst +:VPCMPEQQ KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x29; KReg_reg ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpeqq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + local tmp = vpcmpeqq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # PCMPEQQ 4-250 PAGE 1370 LINE 71179 -:VPCMPEQQ KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x29; KReg_reg ... & YmmReg2_m256_m64bcst +:VPCMPEQQ KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x29; KReg_reg ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpeqq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + local tmp = vpcmpeqq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # PCMPEQQ 4-250 PAGE 1370 LINE 71184 define pcodeop vpcmpeqq_avx512f ; -:VPCMPEQQ KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x29; KReg_reg ... & ZmmReg2_m512_m64bcst +:VPCMPEQQ KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x29; KReg_reg ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpeqq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + local tmp = vpcmpeqq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-257 PAGE 1377 LINE 71517 define pcodeop vpcmpgtd_avx512vl ; -:VPCMPGTD KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x66; KReg_reg ... & XmmReg2_m128_m32bcst +:VPCMPGTD KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x66; KReg_reg ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpgtd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + local tmp = vpcmpgtd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-257 PAGE 1377 LINE 71521 -:VPCMPGTD KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x66; KReg_reg ... & YmmReg2_m256_m32bcst +:VPCMPGTD KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x66; KReg_reg ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpgtd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + local tmp = vpcmpgtd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-257 PAGE 1377 LINE 71525 define pcodeop vpcmpgtd_avx512f ; -:VPCMPGTD KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x66; KReg_reg ... & ZmmReg2_m512_m32bcst +:VPCMPGTD KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x66; KReg_reg ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpgtd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + local tmp = vpcmpgtd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-257 PAGE 1377 LINE 71529 define pcodeop vpcmpgtb_avx512vl ; -:VPCMPGTB KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x64; KReg_reg ... & XmmReg2_m128 +:VPCMPGTB KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_XmmReg; byte=0x64; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpgtb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpgtb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-257 PAGE 1377 LINE 71533 -:VPCMPGTB KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x64; KReg_reg ... 
& YmmReg2_m256 +:VPCMPGTB KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_YmmReg; byte=0x64; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpgtb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpgtb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-258 PAGE 1378 LINE 71545 define pcodeop vpcmpgtb_avx512bw ; -:VPCMPGTB KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x64; KReg_reg ... & ZmmReg2_m512 +:VPCMPGTB KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & evexV5_ZmmReg; byte=0x64; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpgtb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpgtb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,64]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-258 PAGE 1378 LINE 71549 define pcodeop vpcmpgtw_avx512vl ; -:VPCMPGTW KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x65; KReg_reg ... & XmmReg2_m128 +:VPCMPGTW KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_XmmReg; byte=0x65; KReg_reg ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpgtw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpgtw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-258 PAGE 1378 LINE 71553 -:VPCMPGTW KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x65; KReg_reg ... & YmmReg2_m256 +:VPCMPGTW KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & vexVVVV_YmmReg; byte=0x65; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpgtw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpgtw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # PCMPGTB/PCMPGTW/PCMPGTD 4-258 PAGE 1378 LINE 71557 define pcodeop vpcmpgtw_avx512bw ; -:VPCMPGTW KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x65; KReg_reg ... & ZmmReg2_m512 +:VPCMPGTW KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & AVXOpMask & evexV5_ZmmReg; byte=0x65; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpgtw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpgtw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # PCMPGTQ 4-263 PAGE 1383 LINE 71837 define pcodeop vpcmpgtq_avx512vl ; -:VPCMPGTQ KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x37; KReg_reg ... 
& XmmReg2_m128_m64bcst +:VPCMPGTQ KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x37; KReg_reg ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpgtq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + local tmp = vpcmpgtq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # PCMPGTQ 4-263 PAGE 1383 LINE 71841 -:VPCMPGTQ KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x37; KReg_reg ... & YmmReg2_m256_m64bcst +:VPCMPGTQ KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x37; KReg_reg ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpgtq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + local tmp = vpcmpgtq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # PCMPGTQ 4-263 PAGE 1383 LINE 71849 define pcodeop vpcmpgtq_avx512f ; -:VPCMPGTQ KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x37; KReg_reg ... & ZmmReg2_m512_m64bcst +:VPCMPGTQ KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x37; KReg_reg ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpgtq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + local tmp = vpcmpgtq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # PEXTRB/PEXTRD/PEXTRQ 4-274 PAGE 1394 LINE 72334 -define pcodeop vpextrb_avx512bw ; -:VPEXTRB Reg32_m8, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x14; XmmReg1 ... & Reg32_m8; imm8 +@ifdef IA64 +:VPEXTRB Reg32, XmmReg1, imm8 is $(LONGMODE_ON) & $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x14; XmmReg1 & mod=3 & Reg32 & Reg64; imm8 { - Reg32_m8 = vpextrb_avx512bw( XmmReg1, imm8:1 ); - # TODO Reg64 = zext(Reg32) + local tmp = XmmReg1 >> (imm8*8); + Reg64 = zext(tmp[0,8]); +} +@endif + +:VPEXTRB Reg32, XmmReg1, imm8 is $(LONGMODE_OFF) & $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x14; XmmReg1 & mod=3 & Reg32; imm8 +{ + local tmp = XmmReg1 >> (imm8*8); + Reg32 = zext(tmp[0,8]); +} + +:VPEXTRB m8, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x14; XmmReg1 ... & m8; imm8 +{ + local tmp = XmmReg1 >> (imm8*8); + m8 = tmp[0,8]; } # PEXTRB/PEXTRD/PEXTRQ 4-274 PAGE 1394 LINE 72339 -define pcodeop vpextrd_avx512dq ; -:VPEXTRD rm32, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x16; XmmReg1 ... & rm32; imm8 +:VPEXTRD rm32, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x16; XmmReg1 ... & rm32; imm8 { - rm32 = vpextrd_avx512dq( XmmReg1, imm8:1 ); + local tmp = XmmReg1 >> (imm8*32); + rm32 = tmp[0,32]; } # PEXTRB/PEXTRD/PEXTRQ 4-274 PAGE 1394 LINE 72343 -define pcodeop vpextrq_avx512dq ; @ifdef IA64 -:VPEXTRQ rm64, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x16; XmmReg1 ... 
& rm64; imm8 +:VPEXTRQ rm64, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x16; XmmReg1 ... & rm64; imm8 { - rm64 = vpextrq_avx512dq( XmmReg1, imm8:1 ); + local tmp = XmmReg1 >> (imm8*64); + rm64 = tmp[0,64]; } @endif # PEXTRW 4-277 PAGE 1397 LINE 72488 -define pcodeop vpextrw_avx512bw ; -:VPEXTRW Reg32, XmmReg2, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG); byte=0xC5; Reg32 & (mod=0x3 & XmmReg2); imm8 +@ifdef IA64 +:VPEXTRW Reg32, XmmReg2, imm8 is $(LONGMODE_ON) & $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG); byte=0xC5; Reg32 & Reg64 & (mod=0x3 & XmmReg2); imm8 { - Reg32 = vpextrw_avx512bw( XmmReg2, imm8:1 ); - # TODO Reg64 = zext(Reg32) + local tmp = XmmReg2 >> (imm8*16); + Reg64 = zext(tmp[0,16]); +} +@endif + +:VPEXTRW Reg32, XmmReg2, imm8 is $(LONGMODE_OFF) & $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG); byte=0xC5; Reg32 & (mod=0x3 & XmmReg2); imm8 +{ + local tmp = XmmReg2 >> (imm8*16); + Reg32 = zext(tmp[0,16]); } # PEXTRW 4-277 PAGE 1397 LINE 72494 -:VPEXTRW Reg32_m16, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x15; XmmReg1 ... & Reg32_m16; imm8 +@ifdef IA64 +:VPEXTRW Reg32, XmmReg1, imm8 is $(LONGMODE_ON) & $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x15; XmmReg1 & mod=3 & Reg32 & Reg64; imm8 { - Reg32_m16 = vpextrw_avx512bw( XmmReg1, imm8:1 ); - # TODO Reg64 = zext(Reg32) + local tmp = XmmReg1 >> (imm8*16); + Reg64 = zext(tmp[0,16]); +} +@endif + +:VPEXTRW Reg32, XmmReg1, imm8 is $(LONGMODE_OFF) & $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x15; XmmReg1 & mod=3 & Reg32; imm8 +{ + local tmp = XmmReg1 >> (imm8*16); + Reg32 = zext(tmp[0,16]); +} + + +:VPEXTRW m16, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG); byte=0x15; XmmReg1 ... 
& m16; imm8 +{ + local tmp = XmmReg1 >> (imm8*16); + m16 = tmp[0,16]; } # PINSRB/PINSRD/PINSRQ 4-293 PAGE 1413 LINE 73330 define pcodeop vpinsrb_avx512bw ; -:VPINSRB XmmReg1, vexVVVV_XmmReg, Reg32_m8, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x20; (XmmReg1 & ZmmReg1) ... & Reg32_m8; imm8 +:VPINSRB XmmReg1, vexVVVV_XmmReg, Reg32_m8, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x20; (XmmReg1 & ZmmReg1) ... & Reg32_m8; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RVMI) { local tmp:16 = vpinsrb_avx512bw( vexVVVV_XmmReg, Reg32_m8, imm8:1 ); @@ -2890,7 +3232,7 @@ define pcodeop vpinsrb_avx512bw ; # PINSRB/PINSRD/PINSRQ 4-293 PAGE 1413 LINE 73333 define pcodeop vpinsrd_avx512dq ; -:VPINSRD XmmReg1, vexVVVV_XmmReg, rm32, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x22; (XmmReg1 & ZmmReg1) ... & rm32; imm8 +:VPINSRD XmmReg1, vexVVVV_XmmReg, rm32, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x22; (XmmReg1 & ZmmReg1) ... & rm32; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RVMI) { local tmp:16 = vpinsrd_avx512dq( vexVVVV_XmmReg, rm32, imm8:1 ); @@ -2900,7 +3242,7 @@ define pcodeop vpinsrd_avx512dq ; # PINSRB/PINSRD/PINSRQ 4-293 PAGE 1413 LINE 73336 define pcodeop vpinsrq_avx512dq ; @ifdef IA64 -:VPINSRQ XmmReg1, vexVVVV_XmmReg, rm64, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x22; (XmmReg1 & ZmmReg1) ... & rm64; imm8 +:VPINSRQ XmmReg1, vexVVVV_XmmReg, rm64, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x22; (XmmReg1 & ZmmReg1) ... 
& rm64; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RVMI) { local tmp:16 = vpinsrq_avx512dq( vexVVVV_XmmReg, rm64, imm8:1 ); @@ -2910,7 +3252,7 @@ define pcodeop vpinsrq_avx512dq ; # PINSRW 4-296 PAGE 1416 LINE 73449 define pcodeop vpinsrw_avx512bw ; -:VPINSRW XmmReg1, vexVVVV_XmmReg, Reg32_m16, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xC4; (XmmReg1 & ZmmReg1) ... & Reg32_m16; imm8 +:VPINSRW XmmReg1, vexVVVV_XmmReg, Reg32_m16, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xC4; (XmmReg1 & ZmmReg1) ... & Reg32_m16; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S-RVMI) { local tmp:16 = vpinsrw_avx512bw( vexVVVV_XmmReg, Reg32_m16, imm8:1 ); @@ -2919,1007 +3261,1287 @@ define pcodeop vpinsrw_avx512bw ; # PMADDUBSW 4-298 PAGE 1418 LINE 73558 define pcodeop vpmaddubsw_avx512vl ; -:VPMADDUBSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x04; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMADDUBSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x04; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmaddubsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaddubsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMADDUBSW 4-298 PAGE 1418 LINE 73562 -:VPMADDUBSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x04; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256 +:VPMADDUBSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x04; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmaddubsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaddubsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMADDUBSW 4-298 PAGE 1418 LINE 73566 define pcodeop vpmaddubsw_avx512bw ; -:VPMADDUBSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x04; ZmmReg1 ... & ZmmReg2_m512 +:VPMADDUBSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x04; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmaddubsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmaddubsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMADDWD 4-301 PAGE 1421 LINE 73708 define pcodeop vpmaddwd_avx512vl ; -:VPMADDWD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xF5; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMADDWD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xF5; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmaddwd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaddwd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMADDWD 4-301 PAGE 1421 LINE 73712 -:VPMADDWD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xF5; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMADDWD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xF5; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmaddwd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaddwd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMADDWD 4-301 PAGE 1421 LINE 73716 define pcodeop vpmaddwd_avx512bw ; -:VPMADDWD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xF5; ZmmReg1 ... & ZmmReg2_m512 +:VPMADDWD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xF5; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmaddwd_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmaddwd_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73900 define pcodeop vpmaxsb_avx512vl ; -:VPMAXSB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x3C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMAXSB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x3C; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmaxsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73903 -:VPMAXSB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x3C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMAXSB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x3C; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmaxsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73906 define pcodeop vpmaxsb_avx512bw ; -:VPMAXSB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x3C; ZmmReg1 ... & ZmmReg2_m512 +:VPMAXSB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x3C; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmaxsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmaxsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73909 define pcodeop vpmaxsw_avx512vl ; -:VPMAXSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xEE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMAXSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xEE; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmaxsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73912 -:VPMAXSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xEE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMAXSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xEE; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmaxsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73915 define pcodeop vpmaxsw_avx512bw ; -:VPMAXSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xEE; ZmmReg1 ... & ZmmReg2_m512 +:VPMAXSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xEE; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmaxsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmaxsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-304 PAGE 1424 LINE 73918 define pcodeop vpmaxsd_avx512vl ; -:VPMAXSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x3D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPMAXSD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x3D; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmaxsd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxsd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-305 PAGE 1425 LINE 73933 -:VPMAXSD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x3D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPMAXSD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x3D; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmaxsd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxsd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-305 PAGE 1425 LINE 73936 define pcodeop vpmaxsd_avx512f ; -:VPMAXSD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x3D; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPMAXSD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x3D; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmaxsd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpmaxsd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-305 PAGE 1425 LINE 73939 define pcodeop vpmaxsq_avx512vl ; -:VPMAXSQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x3D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMAXSQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x3D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmaxsq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxsq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-305 PAGE 1425 LINE 73942 -:VPMAXSQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x3D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMAXSQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x3D; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmaxsq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxsq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMAXSB/PMAXSW/PMAXSD/PMAXSQ 4-305 PAGE 1425 LINE 73945 define pcodeop vpmaxsq_avx512f ; -:VPMAXSQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x3D; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMAXSQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x3D; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmaxsq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpmaxsq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMAXUB/PMAXUW 4-311 PAGE 1431 LINE 74295 define pcodeop vpmaxub_avx512vl ; -:VPMAXUB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xDE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMAXUB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xDE; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmaxub_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxub_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PMAXUB/PMAXUW 4-311 PAGE 1431 LINE 74298 -:VPMAXUB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xDE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMAXUB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xDE; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmaxub_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxub_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PMAXUB/PMAXUW 4-311 PAGE 1431 LINE 74301 define pcodeop vpmaxub_avx512bw ; -:VPMAXUB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xDE; ZmmReg1 ... & ZmmReg2_m512 +:VPMAXUB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xDE; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmaxub_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmaxub_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PMAXUB/PMAXUW 4-311 PAGE 1431 LINE 74304 define pcodeop vpmaxuw_avx512vl ; -:VPMAXUW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x3E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMAXUW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x3E; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmaxuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMAXUB/PMAXUW 4-311 PAGE 1431 LINE 74307 -:VPMAXUW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x3E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMAXUW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x3E; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmaxuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMAXUB/PMAXUW 4-311 PAGE 1431 LINE 74310 define pcodeop vpmaxuw_avx512bw ; -:VPMAXUW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x3E; ZmmReg1 ... & ZmmReg2_m512 +:VPMAXUW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x3E; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmaxuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmaxuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMAXUD/PMAXUQ 4-316 PAGE 1436 LINE 74540 define pcodeop vpmaxud_avx512vl ; -:VPMAXUD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x3F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPMAXUD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x3F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmaxud_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxud_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMAXUD/PMAXUQ 4-316 PAGE 1436 LINE 74543 -:VPMAXUD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x3F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPMAXUD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x3F; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmaxud_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxud_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMAXUD/PMAXUQ 4-316 PAGE 1436 LINE 74546 define pcodeop vpmaxud_avx512f ; -:VPMAXUD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x3F; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPMAXUD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x3F; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmaxud_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpmaxud_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMAXUD/PMAXUQ 4-316 PAGE 1436 LINE 74549 define pcodeop vpmaxuq_avx512vl ; -:VPMAXUQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x3F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMAXUQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x3F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmaxuq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmaxuq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMAXUD/PMAXUQ 4-316 PAGE 1436 LINE 74552 -:VPMAXUQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x3F; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMAXUQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x3F; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmaxuq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmaxuq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMAXUD/PMAXUQ 4-316 PAGE 1436 LINE 74555 define pcodeop vpmaxuq_avx512f ; -:VPMAXUQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x3F; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMAXUQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x3F; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmaxuq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpmaxuq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMINSB/PMINSW 4-320 PAGE 1440 LINE 74748 define pcodeop vpminsb_avx512vl ; -:VPMINSB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x38; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMINSB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x38; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpminsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpminsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PMINSB/PMINSW 4-320 PAGE 1440 LINE 74751 -:VPMINSB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x38; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMINSB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x38; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpminsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpminsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PMINSB/PMINSW 4-320 PAGE 1440 LINE 74754 define pcodeop vpminsb_avx512bw ; -:VPMINSB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x38; ZmmReg1 ... & ZmmReg2_m512 +:VPMINSB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x38; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpminsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpminsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PMINSB/PMINSW 4-320 PAGE 1440 LINE 74757 define pcodeop vpminsw_avx512vl ; -:VPMINSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xEA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMINSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xEA; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpminsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpminsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMINSB/PMINSW 4-320 PAGE 1440 LINE 74760 -:VPMINSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xEA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMINSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xEA; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpminsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpminsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMINSB/PMINSW 4-320 PAGE 1440 LINE 74763 define pcodeop vpminsw_avx512bw ; -:VPMINSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xEA; ZmmReg1 ... & ZmmReg2_m512 +:VPMINSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xEA; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpminsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpminsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMINSD/PMINSQ 4-325 PAGE 1445 LINE 74995 define pcodeop vpminsd_avx512vl ; -:VPMINSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x39; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPMINSD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x39; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpminsd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpminsd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMINSD/PMINSQ 4-325 PAGE 1445 LINE 74998 -:VPMINSD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x39; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPMINSD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x39; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpminsd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpminsd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMINSD/PMINSQ 4-325 PAGE 1445 LINE 75001 define pcodeop vpminsd_avx512f ; -:VPMINSD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x39; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPMINSD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x39; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpminsd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpminsd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMINSD/PMINSQ 4-325 PAGE 1445 LINE 75004 define pcodeop vpminsq_avx512vl ; -:VPMINSQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x39; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMINSQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x39; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpminsq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpminsq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMINSD/PMINSQ 4-325 PAGE 1445 LINE 75007 -:VPMINSQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x39; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMINSQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x39; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpminsq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpminsq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMINSD/PMINSQ 4-325 PAGE 1445 LINE 75010 define pcodeop vpminsq_avx512f ; -:VPMINSQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x39; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMINSQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x39; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpminsq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpminsq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMINUB/PMINUW 4-329 PAGE 1449 LINE 75207 define pcodeop vpminub_avx512vl ; -:VPMINUB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & KWriteMask & vexVVVV_XmmReg; byte=0xDA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMINUB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & vexVVVV_XmmReg; byte=0xDA; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpminub_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpminub_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PMINUB/PMINUW 4-329 PAGE 1449 LINE 75210 -:VPMINUB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & KWriteMask & vexVVVV_YmmReg; byte=0xDA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMINUB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & vexVVVV_YmmReg; byte=0xDA; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpminub_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpminub_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PMINUB/PMINUW 4-329 PAGE 1449 LINE 75213 define pcodeop vpminub_avx512bw ; -:VPMINUB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & KWriteMask & evexV5_ZmmReg; byte=0xDA; ZmmReg1 ... & ZmmReg2_m512 +:VPMINUB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & evexV5_ZmmReg; byte=0xDA; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpminub_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpminub_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PMINUB/PMINUW 4-329 PAGE 1449 LINE 75216 define pcodeop vpminuw_avx512vl ; -:VPMINUW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & KWriteMask & vexVVVV_XmmReg; byte=0x3A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMINUW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & vexVVVV_XmmReg; byte=0x3A; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpminuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpminuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMINUB/PMINUW 4-329 PAGE 1449 LINE 75219 -:VPMINUW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & KWriteMask & vexVVVV_YmmReg; byte=0x3A; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256 +:VPMINUW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & vexVVVV_YmmReg; byte=0x3A; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpminuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpminuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMINUB/PMINUW 4-329 PAGE 1449 LINE 75222 define pcodeop vpminuw_avx512bw ; -:VPMINUW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & KWriteMask & evexV5_ZmmReg; byte=0x3A; ZmmReg1 ... & ZmmReg2_m512 +:VPMINUW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & evexV5_ZmmReg; byte=0x3A; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpminuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpminuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMINUD/PMINUQ 4-334 PAGE 1454 LINE 75451 define pcodeop vpminud_avx512vl ; -:VPMINUD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x3B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPMINUD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x3B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpminud_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpminud_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMINUD/PMINUQ 4-334 PAGE 1454 LINE 75454 -:VPMINUD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x3B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPMINUD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x3B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpminud_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpminud_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMINUD/PMINUQ 4-334 PAGE 1454 LINE 75457 define pcodeop vpminud_avx512f ; -:VPMINUD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x3B; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPMINUD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x3B; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpminud_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpminud_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMINUD/PMINUQ 4-334 PAGE 1454 LINE 75460 define pcodeop vpminuq_avx512vl ; -:VPMINUQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x3B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMINUQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x3B; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpminuq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpminuq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMINUD/PMINUQ 4-334 PAGE 1454 LINE 75463 -:VPMINUQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x3B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMINUQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x3B; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpminuq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpminuq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMINUD/PMINUQ 4-334 PAGE 1454 LINE 75466 define pcodeop vpminuq_avx512f ; -:VPMINUQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x3B; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMINUQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x3B; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpminuq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpminuq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMOVSX 4-340 PAGE 1460 LINE 75796 define pcodeop vpmovsxbw_avx512vl ; -:VPMOVSXBW XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x20; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVSXBW XmmReg1 XmmOpMask16, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x20; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovsxbw_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovsxbw_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMOVSX 4-340 PAGE 1460 LINE 75799 -:VPMOVSXBW YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x20; (YmmReg1 & ZmmReg1) ... 
& XmmReg2_m128 +:VPMOVSXBW YmmReg1 YmmOpMask16, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x20; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovsxbw_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovsxbw_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMOVSX 4-340 PAGE 1460 LINE 75802 define pcodeop vpmovsxbw_avx512bw ; -:VPMOVSXBW ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x20; ZmmReg1 ... & YmmReg2_m256 +:VPMOVSXBW ZmmReg1 ZmmOpMask16, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x20; (ZmmReg1 & ZmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovsxbw_avx512bw( YmmReg2_m256 ); + ZmmResult = vpmovsxbw_avx512bw( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMOVSX 4-340 PAGE 1460 LINE 75805 define pcodeop vpmovsxbd_avx512vl ; -:VPMOVSXBD XmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x21; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPMOVSXBD XmmReg1 XmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x21; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovsxbd_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovsxbd_avx512vl( XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75819 -:VPMOVSXBD YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x21; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVSXBD YmmReg1 YmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x21; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovsxbd_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovsxbd_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75822 define pcodeop vpmovsxbd_avx512f ; -:VPMOVSXBD ZmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x21; ZmmReg1 ... & XmmReg2_m128 +:VPMOVSXBD ZmmReg1 ZmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x21; (ZmmReg1 & ZmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovsxbd_avx512f( XmmReg2_m128 ); + ZmmResult = vpmovsxbd_avx512f( XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMOVSX 4-341 PAGE 1461 LINE 75825 define pcodeop vpmovsxbq_avx512vl ; -:VPMOVSXBQ XmmReg1^KWriteMask, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x22; (XmmReg1 & ZmmReg1) ... & XmmReg2_m16 +:VPMOVSXBQ XmmReg1 XmmOpMask64, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x22; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m16 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovsxbq_avx512vl( XmmReg2_m16 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovsxbq_avx512vl( XmmReg2_m16 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75828 -:VPMOVSXBQ YmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x22; (YmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPMOVSXBQ YmmReg1 YmmOpMask64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x22; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovsxbq_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovsxbq_avx512vl( XmmReg2_m32 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75831 define pcodeop vpmovsxbq_avx512f ; -:VPMOVSXBQ ZmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x22; ZmmReg1 ... & XmmReg2_m64 +:VPMOVSXBQ ZmmReg1 ZmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x22; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovsxbq_avx512f( XmmReg2_m64 ); + ZmmResult = vpmovsxbq_avx512f( XmmReg2_m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMOVSX 4-341 PAGE 1461 LINE 75834 define pcodeop vpmovsxwd_avx512vl ; -:VPMOVSXWD XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x23; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVSXWD XmmReg1 XmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x23; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovsxwd_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovsxwd_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75837 -:VPMOVSXWD YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x23; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMOVSXWD YmmReg1 YmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x23; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovsxwd_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovsxwd_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75840 define pcodeop vpmovsxwd_avx512f ; -:VPMOVSXWD ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x23; ZmmReg1 ... & YmmReg2_m256 +:VPMOVSXWD ZmmReg1 ZmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x23; (ZmmReg1 & ZmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovsxwd_avx512f( YmmReg2_m256 ); + ZmmResult = vpmovsxwd_avx512f( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMOVSX 4-341 PAGE 1461 LINE 75843 define pcodeop vpmovsxwq_avx512vl ; -:VPMOVSXWQ XmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x24; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m32 +:VPMOVSXWQ XmmReg1 XmmOpMask64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x24; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovsxwq_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovsxwq_avx512vl( XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75846 -:VPMOVSXWQ YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x24; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVSXWQ YmmReg1 YmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x24; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovsxwq_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovsxwq_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75849 define pcodeop vpmovsxwq_avx512f ; -:VPMOVSXWQ ZmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x24; ZmmReg1 ... & XmmReg2_m128 +:VPMOVSXWQ ZmmReg1 ZmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x24; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovsxwq_avx512f( XmmReg2_m128 ); + ZmmResult = vpmovsxwq_avx512f( XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMOVSX 4-341 PAGE 1461 LINE 75852 define pcodeop vpmovsxdq_avx512vl ; -:VPMOVSXDQ XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m64 +:VPMOVSXDQ XmmReg1 XmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x25; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovsxdq_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovsxdq_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75855 -:VPMOVSXDQ YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMOVSXDQ YmmReg1 YmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x25; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovsxdq_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovsxdq_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMOVSX 4-341 PAGE 1461 LINE 75858 define pcodeop vpmovsxdq_avx512f ; -:VPMOVSXDQ ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; ZmmReg1 ... & YmmReg2_m256 +:VPMOVSXDQ ZmmReg1 ZmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x25; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovsxdq_avx512f( YmmReg2_m256 ); + ZmmResult = vpmovsxdq_avx512f( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMOVZX 4-351 PAGE 1471 LINE 76329 define pcodeop vpmovzxbw_avx512vl ; -:VPMOVZXBW XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x30; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m64 +:VPMOVZXBW XmmReg1 XmmOpMask16, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x30; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovzxbw_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovzxbw_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76332 -:VPMOVZXBW YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x30; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMOVZXBW YmmReg1 YmmOpMask16, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x30; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovzxbw_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovzxbw_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76335 define pcodeop vpmovzxbw_avx512bw ; -:VPMOVZXBW ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x30; ZmmReg1 ... & YmmReg2_m256 +:VPMOVZXBW ZmmReg1 ZmmOpMask16, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x30; (ZmmReg1 & ZmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovzxbw_avx512bw( YmmReg2_m256 ); + ZmmResult = vpmovzxbw_avx512bw( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMOVZX 4-351 PAGE 1471 LINE 76338 define pcodeop vpmovzxbd_avx512vl ; -:VPMOVZXBD XmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x31; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPMOVZXBD XmmReg1 XmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x31; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovzxbd_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovzxbd_avx512vl( XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76341 -:VPMOVZXBD YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x31; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVZXBD YmmReg1 YmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x31; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovzxbd_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovzxbd_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76344 define pcodeop vpmovzxbd_avx512f ; -:VPMOVZXBD ZmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x31; ZmmReg1 ... & XmmReg2_m128 +:VPMOVZXBD ZmmReg1 ZmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x31; (ZmmReg1 & ZmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovzxbd_avx512f( XmmReg2_m128 ); + ZmmResult = vpmovzxbd_avx512f( XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMOVZX 4-351 PAGE 1471 LINE 76347 define pcodeop vpmovzxbq_avx512vl ; -:VPMOVZXBQ XmmReg1^KWriteMask, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x32; (XmmReg1 & ZmmReg1) ... & XmmReg2_m16 +:VPMOVZXBQ XmmReg1 XmmOpMask64, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x32; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m16 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovzxbq_avx512vl( XmmReg2_m16 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovzxbq_avx512vl( XmmReg2_m16 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76350 -:VPMOVZXBQ YmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x32; (YmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPMOVZXBQ YmmReg1 YmmOpMask64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x32; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovzxbq_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovzxbq_avx512vl( XmmReg2_m32 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76353 define pcodeop vpmovzxbq_avx512f ; -:VPMOVZXBQ ZmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x32; ZmmReg1 ... & XmmReg2_m64 +:VPMOVZXBQ ZmmReg1 ZmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x32; (ZmmReg1 & ZmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovzxbq_avx512f( XmmReg2_m64 ); + ZmmResult = vpmovzxbq_avx512f( XmmReg2_m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMOVZX 4-351 PAGE 1471 LINE 76356 define pcodeop vpmovzxwd_avx512vl ; -:VPMOVZXWD XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x33; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVZXWD XmmReg1 XmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x33; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovzxwd_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovzxwd_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76359 -:VPMOVZXWD YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x33; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMOVZXWD YmmReg1 YmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x33; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovzxwd_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovzxwd_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76362 define pcodeop vpmovzxwd_avx512f ; -:VPMOVZXWD ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x33; ZmmReg1 ... 
& YmmReg2_m256 +:VPMOVZXWD ZmmReg1 ZmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x33; (ZmmReg1 & ZmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovzxwd_avx512f( YmmReg2_m256 ); + ZmmResult = vpmovzxwd_avx512f( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMOVZX 4-351 PAGE 1471 LINE 76365 define pcodeop vpmovzxwq_avx512vl ; -:VPMOVZXWQ XmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x34; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPMOVZXWQ XmmReg1 XmmOpMask64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x34; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovzxwq_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovzxwq_avx512vl( XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76368 -:VPMOVZXWQ YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x34; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVZXWQ YmmReg1 YmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x34; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovzxwq_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovzxwq_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMOVZX 4-351 PAGE 1471 LINE 76371 define pcodeop vpmovzxwq_avx512f ; -:VPMOVZXWQ ZmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask; byte=0x34; ZmmReg1 ... 
& XmmReg2_m128 +:VPMOVZXWQ ZmmReg1 ZmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) ; byte=0x34; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovzxwq_avx512f( XmmReg2_m128 ); + ZmmResult = vpmovzxwq_avx512f( XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMOVZX 4-352 PAGE 1472 LINE 76386 define pcodeop vpmovzxdq_avx512vl ; -:VPMOVZXDQ XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPMOVZXDQ XmmReg1 XmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x35; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:16 = vpmovzxdq_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmovzxdq_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMOVZX 4-352 PAGE 1472 LINE 76389 -:VPMOVZXDQ YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMOVZXDQ YmmReg1 YmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x35; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - local tmp:32 = vpmovzxdq_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmovzxdq_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMOVZX 4-352 PAGE 1472 LINE 76392 define pcodeop vpmovzxdq_avx512f ; -:VPMOVZXDQ ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; ZmmReg1 ... 
& YmmReg2_m256 +:VPMOVZXDQ ZmmReg1 ZmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x35; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM,QVM,OVM) { - ZmmReg1 = vpmovzxdq_avx512f( YmmReg2_m256 ); + ZmmResult = vpmovzxdq_avx512f( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMULDQ 4-359 PAGE 1479 LINE 76794 define pcodeop vpmuldq_avx512vl ; -:VPMULDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x28; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMULDQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x28; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmuldq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmuldq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMULDQ 4-359 PAGE 1479 LINE 76798 -:VPMULDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x28; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMULDQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x28; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmuldq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmuldq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMULDQ 4-359 PAGE 1479 LINE 76802 define pcodeop vpmuldq_avx512f ; -:VPMULDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x28; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMULDQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x28; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmuldq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpmuldq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMULHRSW 4-362 PAGE 1482 LINE 76934 define pcodeop vpmulhrsw_avx512vl ; -:VPMULHRSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x0B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMULHRSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x0B; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmulhrsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmulhrsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMULHRSW 4-362 PAGE 1482 LINE 76937 -:VPMULHRSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x0B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMULHRSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x0B; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmulhrsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmulhrsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMULHRSW 4-362 PAGE 1482 LINE 76940 define pcodeop vpmulhrsw_avx512bw ; -:VPMULHRSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x0B; ZmmReg1 ... & ZmmReg2_m512 +:VPMULHRSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x0B; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmulhrsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmulhrsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMULHUW 4-366 PAGE 1486 LINE 77147 define pcodeop vpmulhuw_avx512vl ; -:VPMULHUW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE4; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMULHUW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE4; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmulhuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmulhuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMULHUW 4-366 PAGE 1486 LINE 77151 -:VPMULHUW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE4; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMULHUW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE4; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmulhuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmulhuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMULHUW 4-366 PAGE 1486 LINE 77155 define pcodeop vpmulhuw_avx512bw ; -:VPMULHUW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE4; ZmmReg1 ... & ZmmReg2_m512 +:VPMULHUW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE4; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmulhuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmulhuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMULHW 4-370 PAGE 1490 LINE 77376 define pcodeop vpmulhw_avx512vl ; -:VPMULHW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE5; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMULHW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE5; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmulhw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmulhw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMULHW 4-370 PAGE 1490 LINE 77379 -:VPMULHW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE5; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMULHW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE5; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmulhw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmulhw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMULHW 4-370 PAGE 1490 LINE 77382 define pcodeop vpmulhw_avx512bw ; -:VPMULHW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE5; ZmmReg1 ... & ZmmReg2_m512 +:VPMULHW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE5; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmulhw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmulhw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMULLD/PMULLQ 4-374 PAGE 1494 LINE 77582 define pcodeop vpmulld_avx512vl ; -:VPMULLD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x40; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPMULLD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x40; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmulld_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmulld_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PMULLD/PMULLQ 4-374 PAGE 1494 LINE 77585 -:VPMULLD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x40; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPMULLD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x40; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmulld_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmulld_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PMULLD/PMULLQ 4-374 PAGE 1494 LINE 77588 define pcodeop vpmulld_avx512f ; -:VPMULLD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x40; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPMULLD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x40; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmulld_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpmulld_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PMULLD/PMULLQ 4-374 PAGE 1494 LINE 77591 define pcodeop vpmullq_avx512vl ; -:VPMULLQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x40; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMULLQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x40; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmullq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmullq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMULLD/PMULLQ 4-374 PAGE 1494 LINE 77594 -:VPMULLQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x40; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMULLQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x40; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmullq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmullq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMULLD/PMULLQ 4-374 PAGE 1494 LINE 77597 define pcodeop vpmullq_avx512dq ; -:VPMULLQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x40; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMULLQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x40; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmullq_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpmullq_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PMULLW 4-378 PAGE 1498 LINE 77781 define pcodeop vpmullw_avx512vl ; -:VPMULLW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xD5; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPMULLW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xD5; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpmullw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpmullw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PMULLW 4-378 PAGE 1498 LINE 77784 -:VPMULLW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xD5; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPMULLW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xD5; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpmullw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpmullw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PMULLW 4-378 PAGE 1498 LINE 77787 define pcodeop vpmullw_avx512bw ; -:VPMULLW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xD5; ZmmReg1 ... & ZmmReg2_m512 +:VPMULLW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xD5; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpmullw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpmullw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PMULUDQ 4-382 PAGE 1502 LINE 77977 define pcodeop vpmuludq_avx512vl ; -:VPMULUDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xF4; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPMULUDQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xF4; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpmuludq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpmuludq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PMULUDQ 4-382 PAGE 1502 LINE 77981 -:VPMULUDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xF4; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPMULUDQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xF4; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpmuludq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpmuludq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PMULUDQ 4-382 PAGE 1502 LINE 77985 define pcodeop vpmuludq_avx512f ; -:VPMULUDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xF4; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPMULUDQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xF4; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpmuludq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpmuludq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # POR 4-399 PAGE 1519 LINE 78854 define pcodeop vpord_avx512vl ; -:VPORD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xEB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPORD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xEB; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpord_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpord_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # POR 4-399 PAGE 1519 LINE 78857 -:VPORD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xEB; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPORD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xEB; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpord_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpord_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # POR 4-399 PAGE 1519 LINE 78860 define pcodeop vpord_avx512f ; -:VPORD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xEB; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPORD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xEB; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpord_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpord_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # POR 4-399 PAGE 1519 LINE 78863 define pcodeop vporq_avx512vl ; -:VPORQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xEB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPORQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xEB; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vporq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vporq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # POR 4-399 PAGE 1519 LINE 78866 -:VPORQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xEB; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPORQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xEB; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vporq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vporq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # POR 4-399 PAGE 1519 LINE 78869 define pcodeop vporq_avx512f ; -:VPORQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xEB; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPORQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xEB; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vporq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vporq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PSADBW 4-408 PAGE 1528 LINE 79250 define pcodeop vpsadbw_avx512vl ; -:VPSADBW XmmReg1, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xF6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSADBW XmmReg1, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xF6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { local tmp:16 = vpsadbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); @@ -3927,124 +4549,155 @@ define pcodeop vpsadbw_avx512vl ; } # PSADBW 4-408 PAGE 1528 LINE 79255 -:VPSADBW YmmReg1, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xF6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSADBW YmmReg1, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xF6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsadbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsadbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + ZmmReg1 = zext(YmmResult); } # PSADBW 4-408 PAGE 1528 LINE 79260 define pcodeop vpsadbw_avx512bw ; -:VPSADBW ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xF6; ZmmReg1 ... & ZmmReg2_m512 +:VPSADBW ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xF6; ZmmReg1 ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsadbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsadbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + ZmmReg1 = ZmmResult; } # PSHUFB 4-412 PAGE 1532 LINE 79466 define pcodeop vpshufb_avx512vl ; -:VPSHUFB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x00; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSHUFB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x00; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpshufb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpshufb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PSHUFB 4-412 PAGE 1532 LINE 79468 -:VPSHUFB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x00; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSHUFB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x00; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpshufb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpshufb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PSHUFB 4-412 PAGE 1532 LINE 79470 define pcodeop vpshufb_avx512bw ; -:VPSHUFB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x00; ZmmReg1 ... & ZmmReg2_m512 +:VPSHUFB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x00; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpshufb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpshufb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PSHUFD 4-416 PAGE 1536 LINE 79656 define pcodeop vpshufd_avx512vl ; -:VPSHUFD XmmReg1^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x70; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VPSHUFD XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x70; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpshufd_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpshufd_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PSHUFD 4-416 PAGE 1536 LINE 79659 -:VPSHUFD YmmReg1^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x70; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m32bcst; imm8 +:VPSHUFD YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x70; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpshufd_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpshufd_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PSHUFD 4-416 PAGE 1536 LINE 79662 define pcodeop vpshufd_avx512f ; -:VPSHUFD ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x70; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VPSHUFD ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x70; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpshufd_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vpshufd_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PSHUFHW 4-420 PAGE 1540 LINE 79863 define pcodeop vpshufhw_avx512vl ; -:VPSHUFHW XmmReg1^KWriteMask, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_WIG) & KWriteMask; byte=0x70; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128; imm8 +:VPSHUFHW XmmReg1 XmmOpMask16, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_WIG) ; byte=0x70; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpshufhw_avx512vl( XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpshufhw_avx512vl( XmmReg2_m128, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSHUFHW 4-420 PAGE 1540 LINE 79866 -:VPSHUFHW YmmReg1^KWriteMask, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_WIG) & KWriteMask; byte=0x70; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256; imm8 +:VPSHUFHW YmmReg1 YmmOpMask16, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_WIG) ; byte=0x70; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpshufhw_avx512vl( YmmReg2_m256, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpshufhw_avx512vl( YmmReg2_m256, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSHUFHW 4-420 PAGE 1540 LINE 79869 define pcodeop vpshufhw_avx512bw ; -:VPSHUFHW ZmmReg1^KWriteMask, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_WIG) & KWriteMask; byte=0x70; ZmmReg1 ... & ZmmReg2_m512; imm8 +:VPSHUFHW ZmmReg1 ZmmOpMask16, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_WIG) ; byte=0x70; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpshufhw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmResult = vpshufhw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSHUFLW 4-423 PAGE 1543 LINE 80038 define pcodeop vpshuflw_avx512vl ; -:VPSHUFLW XmmReg1^KWriteMask, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_WIG) & KWriteMask; byte=0x70; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128; imm8 +:VPSHUFLW XmmReg1 XmmOpMask16, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_WIG) ; byte=0x70; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpshuflw_avx512vl( XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpshuflw_avx512vl( XmmReg2_m128, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSHUFLW 4-423 PAGE 1543 LINE 80041 -:VPSHUFLW YmmReg1^KWriteMask, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_WIG) & KWriteMask; byte=0x70; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256; imm8 +:VPSHUFLW YmmReg1 YmmOpMask16, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_WIG) ; byte=0x70; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpshuflw_avx512vl( YmmReg2_m256, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpshuflw_avx512vl( YmmReg2_m256, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSHUFLW 4-423 PAGE 1543 LINE 80044 define pcodeop vpshuflw_avx512bw ; -:VPSHUFLW ZmmReg1^KWriteMask, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_WIG) & KWriteMask; byte=0x70; ZmmReg1 ... & ZmmReg2_m512; imm8 +:VPSHUFLW ZmmReg1 ZmmOpMask16, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_WIG) ; byte=0x70; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpshuflw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmResult = vpshuflw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSLLDQ 4-431 PAGE 1551 LINE 80491 define pcodeop vpslldq_avx512vl ; -:VPSLLDQ vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=7 ... & XmmReg2_m128; imm8 +:VPSLLDQ vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=7 ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { local tmp:64 = vpslldq_avx512vl( XmmReg2_m128, imm8:1 ); @@ -4052,7 +4705,7 @@ define pcodeop vpslldq_avx512vl ; } # PSLLDQ 4-431 PAGE 1551 LINE 80493 -:VPSLLDQ vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=7 ... & YmmReg2_m256; imm8 +:VPSLLDQ vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=7 ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { local tmp:64 = vpslldq_avx512vl( YmmReg2_m256, imm8:1 ); @@ -4061,7 +4714,7 @@ define pcodeop vpslldq_avx512vl ; # PSLLDQ 4-431 PAGE 1551 LINE 80495 define pcodeop vpslldq_avx512bw ; -:VPSLLDQ evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x73; reg_opcode=7 ... & ZmmReg2_m512; imm8 +:VPSLLDQ evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x73; reg_opcode=7 ... 
& ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { evexV5_ZmmReg = vpslldq_avx512bw( ZmmReg2_m512, imm8:1 ); @@ -4069,295 +4722,380 @@ define pcodeop vpslldq_avx512bw ; # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80667 define pcodeop vpsllw_avx512vl ; -:VPSLLW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xF1; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xF1; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsllw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsllw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80670 -:VPSLLW YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xF1; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xF1; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsllw_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsllw_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80673 define pcodeop vpsllw_avx512bw ; -:VPSLLW ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xF1; ZmmReg1 ... 
& XmmReg2_m128 +:VPSLLW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xF1; (ZmmReg1 & ZmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsllw_avx512bw( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsllw_avx512bw( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80676 -:VPSLLW vexVVVV_XmmReg^KWriteMask, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x71; reg_opcode=6 ... & XmmReg2_m128; imm8 +:VPSLLW vexVVVV_XmmReg XmmOpMask16, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask16; byte=0x71; reg_opcode=6 ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { - local tmp:64 = vpsllw_avx512vl( XmmReg2_m128, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsllw_avx512vl( XmmReg2_m128, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask16; + vexVVVV_ZmmReg = zext(XmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80678 -:VPSLLW vexVVVV_YmmReg^KWriteMask, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x71; reg_opcode=6 ... & YmmReg2_m256; imm8 +:VPSLLW vexVVVV_YmmReg YmmOpMask16, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask16; byte=0x71; reg_opcode=6 ... 
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { - local tmp:64 = vpsllw_avx512vl( YmmReg2_m256, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsllw_avx512vl( YmmReg2_m256, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask16; + vexVVVV_ZmmReg = zext(YmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80680 -:VPSLLW evexV5_ZmmReg^KWriteMask, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg & KWriteMask; byte=0x71; reg_opcode=6 ... & ZmmReg2_m512; imm8 +:VPSLLW evexV5_ZmmReg ZmmOpMask16, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg & ZmmOpMask16; byte=0x71; reg_opcode=6 ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { - evexV5_ZmmReg = vpsllw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmResult = vpsllw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask16; + evexV5_ZmmReg = ZmmResult; } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80682 define pcodeop vpslld_avx512vl ; -:VPSLLD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xF2; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xF2; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpslld_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpslld_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80685 -:VPSLLD YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xF2; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xF2; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpslld_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpslld_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80688 define pcodeop vpslld_avx512f ; -:VPSLLD ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xF2; ZmmReg1 ... & XmmReg2_m128 +:VPSLLD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xF2; (ZmmReg1 & ZmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpslld_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpslld_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80691 -:VPSLLD vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=6 ... & XmmReg2_m128_m32bcst; imm8 +:VPSLLD vexVVVV_XmmReg XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask32; byte=0x72; reg_opcode=6 ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpslld_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpslld_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask32; + vexVVVV_ZmmReg = zext(XmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80694 -:VPSLLD vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=6 ... & YmmReg2_m256_m32bcst; imm8 +:VPSLLD vexVVVV_YmmReg YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask32; byte=0x72; reg_opcode=6 ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpslld_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpslld_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask32; + vexVVVV_ZmmReg = zext(YmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80697 -:VPSLLD evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=6 ... & ZmmReg2_m512_m32bcst; imm8 +:VPSLLD evexV5_ZmmReg ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & ZmmOpMask32; byte=0x72; reg_opcode=6 ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - evexV5_ZmmReg = vpslld_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vpslld_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask32; + evexV5_ZmmReg = ZmmResult; } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80700 define pcodeop vpsllq_avx512vl ; -:VPSLLQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xF3; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xF3; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsllq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsllq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80703 -:VPSLLQ YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xF3; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xF3; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsllq_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsllq_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PSLLW/PSLLD/PSLLQ 4-434 PAGE 1554 LINE 80706 define pcodeop vpsllq_avx512f ; -:VPSLLQ ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xF3; ZmmReg1 ... & XmmReg2_m128 +:VPSLLQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xF3; (ZmmReg1 & ZmmOpMask64) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsllq_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsllq_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PSLLW/PSLLD/PSLLQ 4-435 PAGE 1555 LINE 80721 -:VPSLLQ vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x73; reg_opcode=6 ... & XmmReg2_m128_m64bcst; imm8 +:VPSLLQ vexVVVV_XmmReg XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask64; byte=0x73; reg_opcode=6 ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsllq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsllq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask64; + vexVVVV_ZmmReg = zext(XmmResult); } # PSLLW/PSLLD/PSLLQ 4-435 PAGE 1555 LINE 80724 -:VPSLLQ vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x73; reg_opcode=6 ... & YmmReg2_m256_m64bcst; imm8 +:VPSLLQ vexVVVV_YmmReg YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask64; byte=0x73; reg_opcode=6 ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsllq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsllq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask64; + vexVVVV_ZmmReg = zext(YmmResult); } # PSLLW/PSLLD/PSLLQ 4-435 PAGE 1555 LINE 80727 -:VPSLLQ evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & KWriteMask; byte=0x73; reg_opcode=6 ... & ZmmReg2_m512_m64bcst; imm8 +:VPSLLQ evexV5_ZmmReg ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & ZmmOpMask64; byte=0x73; reg_opcode=6 ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - evexV5_ZmmReg = vpsllq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpsllq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask64; + evexV5_ZmmReg = ZmmResult; } # PSRAW/PSRAD/PSRAQ 4-445 PAGE 1565 LINE 81329 define pcodeop vpsraw_avx512vl ; -:VPSRAW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE1; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE1; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsraw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsraw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSRAW/PSRAD/PSRAQ 4-445 PAGE 1565 LINE 81332 -:VPSRAW YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE1; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE1; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsraw_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsraw_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSRAW/PSRAD/PSRAQ 4-445 PAGE 1565 LINE 81335 define pcodeop vpsraw_avx512bw ; -:VPSRAW ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE1; ZmmReg1 ... & XmmReg2_m128 +:VPSRAW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE1; (ZmmReg1 & ZmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsraw_avx512bw( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsraw_avx512bw( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81350 -:VPSRAW vexVVVV_XmmReg^KWriteMask, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x71; reg_opcode=4 ... & XmmReg2_m128; imm8 +:VPSRAW vexVVVV_XmmReg XmmOpMask16, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask16; byte=0x71; reg_opcode=4 ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { - local tmp:64 = vpsraw_avx512vl( XmmReg2_m128, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsraw_avx512vl( XmmReg2_m128, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask16; + vexVVVV_ZmmReg = zext(XmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81352 -:VPSRAW vexVVVV_YmmReg^KWriteMask, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x71; reg_opcode=4 ... & YmmReg2_m256; imm8 +:VPSRAW vexVVVV_YmmReg YmmOpMask16, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask16; byte=0x71; reg_opcode=4 ... 
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { - local tmp:64 = vpsraw_avx512vl( YmmReg2_m256, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsraw_avx512vl( YmmReg2_m256, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask16; + vexVVVV_ZmmReg = zext(YmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81354 -:VPSRAW evexV5_ZmmReg^KWriteMask, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg & KWriteMask; byte=0x71; reg_opcode=4 ... & ZmmReg2_m512; imm8 +:VPSRAW evexV5_ZmmReg ZmmOpMask16, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg & ZmmOpMask16; byte=0x71; reg_opcode=4 ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVMI) { - evexV5_ZmmReg = vpsraw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmResult = vpsraw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask16; + evexV5_ZmmReg = ZmmResult; } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81356 define pcodeop vpsrad_avx512vl ; -:VPSRAD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xE2; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xE2; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsrad_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrad_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81359 -:VPSRAD YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xE2; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xE2; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsrad_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrad_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81362 define pcodeop vpsrad_avx512f ; -:VPSRAD ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xE2; ZmmReg1 ... & XmmReg2_m128 +:VPSRAD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xE2; (ZmmReg1 & ZmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsrad_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsrad_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81365 -:VPSRAD vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=4 ... & XmmReg2_m128_m32bcst; imm8 +:VPSRAD vexVVVV_XmmReg XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask32; byte=0x72; reg_opcode=4 ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsrad_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsrad_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask32; + vexVVVV_ZmmReg = zext(XmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81368 -:VPSRAD vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=4 ... & YmmReg2_m256_m32bcst; imm8 +:VPSRAD vexVVVV_YmmReg YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask32; byte=0x72; reg_opcode=4 ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsrad_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsrad_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask32; + vexVVVV_ZmmReg = zext(YmmResult); + } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81371 -:VPSRAD evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=4 ... & ZmmReg2_m512_m32bcst; imm8 +:VPSRAD evexV5_ZmmReg ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & ZmmOpMask32; byte=0x72; reg_opcode=4 ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - evexV5_ZmmReg = vpsrad_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vpsrad_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask32; + evexV5_ZmmReg = ZmmResult; } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81374 define pcodeop vpsraq_avx512vl ; -:VPSRAQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xE2; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xE2; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsraq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsraq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81377 -:VPSRAQ YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xE2; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xE2; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsraq_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsraq_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81380 define pcodeop vpsraq_avx512f ; -:VPSRAQ ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xE2; ZmmReg1 ... & XmmReg2_m128 +:VPSRAQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xE2; (ZmmReg1 & ZmmOpMask64) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsraq_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsraq_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81383 -:VPSRAQ vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=4 ... & XmmReg2_m128_m64bcst; imm8 +:VPSRAQ vexVVVV_XmmReg XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask64; byte=0x72; reg_opcode=4 ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsraq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsraq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask64; + vexVVVV_ZmmReg = zext(XmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81386 -:VPSRAQ vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=4 ... & YmmReg2_m256_m64bcst; imm8 +:VPSRAQ vexVVVV_YmmReg YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask64; byte=0x72; reg_opcode=4 ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsraq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsraq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask64; + vexVVVV_ZmmReg = zext(YmmResult); } # PSRAW/PSRAD/PSRAQ 4-446 PAGE 1566 LINE 81389 -:VPSRAQ evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=4 ... & ZmmReg2_m512_m64bcst; imm8 +:VPSRAQ evexV5_ZmmReg ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & ZmmOpMask64; byte=0x72; reg_opcode=4 ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - evexV5_ZmmReg = vpsraq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpsraq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask64; + evexV5_ZmmReg = ZmmResult; } # PSRLDQ 4-455 PAGE 1575 LINE 81879 define pcodeop vpsrldq_avx512vl ; -:VPSRLDQ vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=3 ... & XmmReg2_m128; imm8 +:VPSRLDQ vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=3 ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { local tmp:64 = vpsrldq_avx512vl( XmmReg2_m128, imm8:1 ); @@ -4365,7 +5103,7 @@ define pcodeop vpsrldq_avx512vl ; } # PSRLDQ 4-455 PAGE 1575 LINE 81881 -:VPSRLDQ vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=3 ... 
& YmmReg2_m256; imm8 +:VPSRLDQ vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg); byte=0x73; reg_opcode=3 ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { local tmp:64 = vpsrldq_avx512vl( YmmReg2_m256, imm8:1 ); @@ -4374,7 +5112,7 @@ define pcodeop vpsrldq_avx512vl ; # PSRLDQ 4-455 PAGE 1575 LINE 81883 define pcodeop vpsrldq_avx512bw ; -:VPSRLDQ evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x73; reg_opcode=3 ... & ZmmReg2_m512; imm8 +:VPSRLDQ evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x73; reg_opcode=3 ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { evexV5_ZmmReg = vpsrldq_avx512bw( ZmmReg2_m512, imm8:1 ); @@ -4382,787 +5120,1003 @@ define pcodeop vpsrldq_avx512bw ; # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82059 define pcodeop vpsrlw_avx512vl ; -:VPSRLW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xD1; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xD1; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsrlw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrlw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82062 -:VPSRLW YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xD1; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xD1; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsrlw_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrlw_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82065 define pcodeop vpsrlw_avx512bw ; -:VPSRLW ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xD1; ZmmReg1 ... & XmmReg2_m128 +:VPSRLW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xD1; (ZmmReg1 & ZmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsrlw_avx512bw( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsrlw_avx512bw( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82068 -:VPSRLW vexVVVV_XmmReg^KWriteMask, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x71; reg_opcode=2 ... & XmmReg2_m128; imm8 +:VPSRLW vexVVVV_XmmReg XmmOpMask16, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask16; byte=0x71; reg_opcode=2 ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:64 = vpsrlw_avx512vl( XmmReg2_m128, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsrlw_avx512vl( XmmReg2_m128, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask16; + vexVVVV_ZmmReg = zext(XmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82070 -:VPSRLW vexVVVV_YmmReg^KWriteMask, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x71; reg_opcode=2 ... & YmmReg2_m256; imm8 +:VPSRLW vexVVVV_YmmReg YmmOpMask16, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask16; byte=0x71; reg_opcode=2 ... 
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:64 = vpsrlw_avx512vl( YmmReg2_m256, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsrlw_avx512vl( YmmReg2_m256, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask16; + vexVVVV_ZmmReg = zext(YmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82072 -:VPSRLW evexV5_ZmmReg^KWriteMask, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg & KWriteMask; byte=0x71; reg_opcode=2 ... & ZmmReg2_m512; imm8 +:VPSRLW evexV5_ZmmReg ZmmOpMask16, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg & ZmmOpMask16; byte=0x71; reg_opcode=2 ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - evexV5_ZmmReg = vpsrlw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmResult = vpsrlw_avx512bw( ZmmReg2_m512, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask16; + evexV5_ZmmReg = ZmmResult; } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82074 define pcodeop vpsrld_avx512vl ; -:VPSRLD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xD2; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xD2; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsrld_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrld_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82077 -:VPSRLD YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xD2; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xD2; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsrld_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrld_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82080 define pcodeop vpsrld_avx512f ; -:VPSRLD ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xD2; ZmmReg1 ... & XmmReg2_m128 +:VPSRLD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xD2; (ZmmReg1 & ZmmOpMask32) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsrld_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsrld_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82084 -:VPSRLD vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=2 ... & XmmReg2_m128_m32bcst; imm8 +:VPSRLD vexVVVV_XmmReg XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask32; byte=0x72; reg_opcode=2 ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsrld_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsrld_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask32; + vexVVVV_ZmmReg = zext(XmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82088 -:VPSRLD vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=2 ... & YmmReg2_m256_m32bcst; imm8 +:VPSRLD vexVVVV_YmmReg YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask32; byte=0x72; reg_opcode=2 ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsrld_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsrld_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask32; + vexVVVV_ZmmReg = zext(YmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82091 -:VPSRLD evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=2 ... & ZmmReg2_m512_m32bcst; imm8 +:VPSRLD evexV5_ZmmReg ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & ZmmOpMask32; byte=0x72; reg_opcode=2 ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - evexV5_ZmmReg = vpsrld_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vpsrld_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask32; + evexV5_ZmmReg = ZmmResult; } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82094 define pcodeop vpsrlq_avx512vl ; -:VPSRLQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xD3; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xD3; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:16 = vpsrlq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrlq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82097 -:VPSRLQ YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xD3; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xD3; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - local tmp:32 = vpsrlq_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrlq_avx512vl( vexVVVV_YmmReg, XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PSRLW/PSRLD/PSRLQ 4-458 PAGE 1578 LINE 82100 define pcodeop vpsrlq_avx512f ; -:VPSRLQ ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xD3; ZmmReg1 ... & XmmReg2_m128 +:VPSRLQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xD3; (ZmmReg1 & ZmmOpMask64) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 12; ] # (TupleType M128) { - ZmmReg1 = vpsrlq_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmResult = vpsrlq_avx512f( evexV5_ZmmReg, XmmReg2_m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PSRLW/PSRLD/PSRLQ 4-459 PAGE 1579 LINE 82115 -:VPSRLQ vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x73; reg_opcode=2 ... & XmmReg2_m128_m64bcst; imm8 +:VPSRLQ vexVVVV_XmmReg XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask64; byte=0x73; reg_opcode=2 ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsrlq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vpsrlq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask64; + vexVVVV_ZmmReg = zext(XmmResult); } # PSRLW/PSRLD/PSRLQ 4-459 PAGE 1579 LINE 82119 -:VPSRLQ vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x73; reg_opcode=2 ... & YmmReg2_m256_m64bcst; imm8 +:VPSRLQ vexVVVV_YmmReg YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask64; byte=0x73; reg_opcode=2 ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:64 = vpsrlq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vpsrlq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask64; + vexVVVV_ZmmReg = zext(YmmResult); } # PSRLW/PSRLD/PSRLQ 4-459 PAGE 1579 LINE 82122 -:VPSRLQ evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & KWriteMask; byte=0x73; reg_opcode=2 ... & ZmmReg2_m512_m64bcst; imm8 +:VPSRLQ evexV5_ZmmReg ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & ZmmOpMask64; byte=0x73; reg_opcode=2 ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - evexV5_ZmmReg = vpsrlq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpsrlq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask64; + evexV5_ZmmReg = ZmmResult; } # PSUBB/PSUBW/PSUBD 4-469 PAGE 1589 LINE 82702 define pcodeop vpsubb_avx512vl ; -:VPSUBB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xF8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSUBB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xF8; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsubb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PSUBB/PSUBW/PSUBD 4-469 PAGE 1589 LINE 82705 -:VPSUBB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xF8; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSUBB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xF8; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsubb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PSUBB/PSUBW/PSUBD 4-469 PAGE 1589 LINE 82708 define pcodeop vpsubb_avx512bw ; -:VPSUBB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xF8; ZmmReg1 ... & ZmmReg2_m512 +:VPSUBB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xF8; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsubb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsubb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PSUBB/PSUBW/PSUBD 4-469 PAGE 1589 LINE 82711 define pcodeop vpsubw_avx512vl ; -:VPSUBW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xF9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSUBW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xF9; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsubw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSUBB/PSUBW/PSUBD 4-469 PAGE 1589 LINE 82714 -:VPSUBW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xF9; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSUBW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xF9; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsubw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSUBB/PSUBW/PSUBD 4-469 PAGE 1589 LINE 82717 define pcodeop vpsubw_avx512bw ; -:VPSUBW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xF9; ZmmReg1 ... & ZmmReg2_m512 +:VPSUBW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xF9; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsubw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsubw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSUBB/PSUBW/PSUBD 4-470 PAGE 1590 LINE 82733 define pcodeop vpsubd_avx512vl ; -:VPSUBD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xFA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPSUBD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xFA; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsubd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PSUBB/PSUBW/PSUBD 4-470 PAGE 1590 LINE 82736 -:VPSUBD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xFA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPSUBD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xFA; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsubd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PSUBB/PSUBW/PSUBD 4-470 PAGE 1590 LINE 82743 define pcodeop vpsubd_avx512f ; -:VPSUBD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xFA; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPSUBD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xFA; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsubd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpsubd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PSUBQ 4-476 PAGE 1596 LINE 83111 define pcodeop vpsubq_avx512vl ; -:VPSUBQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xFB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPSUBQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xFB; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsubq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PSUBQ 4-476 PAGE 1596 LINE 83114 -:VPSUBQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xFB; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPSUBQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xFB; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsubq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PSUBQ 4-476 PAGE 1596 LINE 83117 define pcodeop vpsubq_avx512f ; -:VPSUBQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xFB; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPSUBQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xFB; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsubq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpsubq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PSUBSB/PSUBSW 4-479 PAGE 1599 LINE 83270 define pcodeop vpsubsb_avx512vl ; -:VPSUBSB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSUBSB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE8; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsubsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubsb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PSUBSB/PSUBSW 4-479 PAGE 1599 LINE 83274 -:VPSUBSB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE8; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSUBSB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE8; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsubsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubsb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PSUBSB/PSUBSW 4-479 PAGE 1599 LINE 83278 define pcodeop vpsubsb_avx512bw ; -:VPSUBSB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE8; ZmmReg1 ... & ZmmReg2_m512 +:VPSUBSB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE8; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsubsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsubsb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PSUBSB/PSUBSW 4-479 PAGE 1599 LINE 83282 define pcodeop vpsubsw_avx512vl ; -:VPSUBSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xE9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSUBSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xE9; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsubsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubsw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSUBSB/PSUBSW 4-479 PAGE 1599 LINE 83286 -:VPSUBSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xE9; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSUBSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xE9; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsubsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubsw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSUBSB/PSUBSW 4-480 PAGE 1600 LINE 83302 -# WARNING: did not recognize VEX field 512 for "PSUBSW zmm1 {k1}{z}, zmm2, zmm3/m512" define pcodeop psubsw_avx512bw ; -:PSUBSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(VEX_NDS) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xE9; ZmmReg1 ... & ZmmReg2_m512 +:PSUBSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(VEX_NDS) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xE9; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 { - ZmmReg1 = psubsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = psubsw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PSUBUSB/PSUBUSW 4-483 PAGE 1603 LINE 83510 define pcodeop vpsubusb_avx512vl ; -:VPSUBUSB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xD8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSUBUSB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xD8; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsubusb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubusb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PSUBUSB/PSUBUSW 4-483 PAGE 1603 LINE 83514 -:VPSUBUSB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xD8; (YmmReg1 & ZmmReg1) ...
& YmmReg2_m256 +:VPSUBUSB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xD8; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsubusb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubusb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PSUBUSB/PSUBUSW 4-483 PAGE 1603 LINE 83518 define pcodeop vpsubusb_avx512bw ; -:VPSUBUSB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xD8; ZmmReg1 ... & ZmmReg2_m512 +:VPSUBUSB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xD8; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsubusb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsubusb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PSUBUSB/PSUBUSW 4-483 PAGE 1603 LINE 83522 define pcodeop vpsubusw_avx512vl ; -:VPSUBUSW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0xD9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSUBUSW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0xD9; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsubusw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsubusw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PSUBUSB/PSUBUSW 4-483 PAGE 1603 LINE 83526 -:VPSUBUSW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0xD9; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSUBUSW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0xD9; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsubusw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsubusw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PSUBUSB/PSUBUSW 4-484 PAGE 1604 LINE 83543 define pcodeop vpsubusw_avx512bw ; -:VPSUBUSW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0xD9; ZmmReg1 ... & ZmmReg2_m512 +:VPSUBUSW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0xD9; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsubusw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsubusw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-491 PAGE 1611 LINE 83948 define pcodeop vpunpckhbw_avx512vl ; -:VPUNPCKHBW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x68; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPUNPCKHBW XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x68; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpunpckhbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpckhbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-491 PAGE 1611 LINE 83952 define pcodeop vpunpckhwd_avx512vl ; -:VPUNPCKHWD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x69; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPUNPCKHWD XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x69; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpunpckhwd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpckhwd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-491 PAGE 1611 LINE 83955 define pcodeop vpunpckhdq_avx512vl ; -:VPUNPCKHDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x6A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPUNPCKHDQ XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x6A; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpunpckhdq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpckhdq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-491 PAGE 1611 LINE 83958 define pcodeop vpunpckhqdq_avx512vl ; -:VPUNPCKHQDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x6D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPUNPCKHQDQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x6D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpunpckhqdq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpckhqdq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83974 -:VPUNPCKHBW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x68; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPUNPCKHBW YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x68; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpunpckhbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpckhbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83977 -:VPUNPCKHWD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x69; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPUNPCKHWD YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x69; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpunpckhwd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpckhwd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83980 -:VPUNPCKHDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x6A; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPUNPCKHDQ YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x6A; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpunpckhdq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpckhdq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83984 -:VPUNPCKHQDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x6D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPUNPCKHQDQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x6D; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpunpckhqdq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpckhqdq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83988 define pcodeop vpunpckhbw_avx512bw ; -:VPUNPCKHBW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x68; ZmmReg1 ... & ZmmReg2_m512 +:VPUNPCKHBW ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x68; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpunpckhbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpunpckhbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83991 define pcodeop vpunpckhwd_avx512bw ; -:VPUNPCKHWD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x69; ZmmReg1 ... & ZmmReg2_m512 +:VPUNPCKHWD ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x69; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpunpckhwd_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpunpckhwd_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83994 define pcodeop vpunpckhdq_avx512f ; -:VPUNPCKHDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x6A; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPUNPCKHDQ ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x6A; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpunpckhdq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpunpckhdq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ 4-492 PAGE 1612 LINE 83997 define pcodeop vpunpckhqdq_avx512f ; -:VPUNPCKHQDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x6D; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPUNPCKHQDQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x6D; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpunpckhqdq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpunpckhqdq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-501 PAGE 1621 LINE 84553 define pcodeop vpunpcklbw_avx512vl ; -:VPUNPCKLBW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x60; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPUNPCKLBW XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x60; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpunpcklbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpcklbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-501 PAGE 1621 LINE 84556 define pcodeop vpunpcklwd_avx512vl ; -:VPUNPCKLWD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_XmmReg; byte=0x61; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPUNPCKLWD XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_XmmReg; byte=0x61; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpunpcklwd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpcklwd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-501 PAGE 1621 LINE 84559 define pcodeop vpunpckldq_avx512vl ; -:VPUNPCKLDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x62; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPUNPCKLDQ XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x62; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpunpckldq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpckldq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-501 PAGE 1621 LINE 84562 define pcodeop vpunpcklqdq_avx512vl ; -:VPUNPCKLQDQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x6C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPUNPCKLQDQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x6C; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpunpcklqdq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpunpcklqdq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84578 -:VPUNPCKLBW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x60; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPUNPCKLBW YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x60; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpunpcklbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpcklbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84581 -:VPUNPCKLWD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & vexVVVV_YmmReg; byte=0x61; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPUNPCKLWD YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & vexVVVV_YmmReg; byte=0x61; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpunpcklwd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpcklwd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84584 -:VPUNPCKLDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x62; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPUNPCKLDQ YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x62; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpunpckldq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpckldq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84587 -:VPUNPCKLQDQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x6C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPUNPCKLQDQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x6C; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpunpcklqdq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpunpcklqdq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84590 define pcodeop vpunpcklbw_avx512bw ; -:VPUNPCKLBW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x60; ZmmReg1 ... & ZmmReg2_m512 +:VPUNPCKLBW ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x60; (ZmmReg1 & ZmmOpMask8) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpunpcklbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpunpcklbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84593 define pcodeop vpunpcklwd_avx512bw ; -:VPUNPCKLWD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & KWriteMask & evexV5_ZmmReg; byte=0x61; ZmmReg1 ... & ZmmReg2_m512 +:VPUNPCKLWD ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_WIG) & evexV5_ZmmReg; byte=0x61; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpunpcklwd_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpunpcklwd_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84596 define pcodeop vpunpckldq_avx512f ; -:VPUNPCKLDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x62; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPUNPCKLDQ ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x62; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpunpckldq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpunpckldq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ/PUNPCKLQDQ 4-502 PAGE 1622 LINE 84599 define pcodeop vpunpcklqdq_avx512f ; -:VPUNPCKLQDQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x6C; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPUNPCKLQDQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x6C; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpunpcklqdq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpunpcklqdq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PXOR 4-518 PAGE 1638 LINE 85503 define pcodeop vpxord_avx512vl ; -:VPXORD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xEF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPXORD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xEF; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpxord_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpxord_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PXOR 4-518 PAGE 1638 LINE 85505 -:VPXORD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xEF; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPXORD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xEF; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpxord_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpxord_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PXOR 4-518 PAGE 1638 LINE 85507 define pcodeop vpxord_avx512f ; -:VPXORD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xEF; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPXORD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xEF; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpxord_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpxord_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PXOR 4-518 PAGE 1638 LINE 85514 define pcodeop vpxorq_avx512vl ; -:VPXORQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xEF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPXORQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xEF; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpxorq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpxorq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PXOR 4-518 PAGE 1638 LINE 85521 -:VPXORQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xEF; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPXORQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xEF; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpxorq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpxorq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PXOR 4-518 PAGE 1638 LINE 85523 define pcodeop vpxorq_avx512f ; -:VPXORQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xEF; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPXORQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xEF; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpxorq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpxorq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # SHUFPD 4-617 PAGE 1737 LINE 90231 define pcodeop vshufpd_avx512vl ; -:VSHUFPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xC6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VSHUFPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xC6; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vshufpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vshufpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # SHUFPD 4-617 PAGE 1737 LINE 90235 -:VSHUFPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xC6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VSHUFPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xC6; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vshufpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vshufpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # SHUFPD 4-617 PAGE 1737 LINE 90239 define pcodeop vshufpd_avx512f ; -:VSHUFPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xC6; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VSHUFPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0xC6; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vshufpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vshufpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # SHUFPS 4-622 PAGE 1742 LINE 90489 define pcodeop vshufps_avx512vl ; -:VSHUFPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xC6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VSHUFPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xC6; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vshufps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vshufps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # SHUFPS 4-622 PAGE 1742 LINE 90493 -:VSHUFPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xC6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VSHUFPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xC6; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vshufps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vshufps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # SHUFPS 4-622 PAGE 1742 LINE 90497 define pcodeop vshufps_avx512f ; -:VSHUFPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xC6; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VSHUFPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0xC6; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vshufps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vshufps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # SQRTPD 4-632 PAGE 1752 LINE 91007 define pcodeop vsqrtpd_avx512vl ; -:VSQRTPD XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x51; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VSQRTPD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x51; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vsqrtpd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vsqrtpd_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # SQRTPD 4-632 PAGE 1752 LINE 91010 -:VSQRTPD YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x51; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VSQRTPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x51; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vsqrtpd_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vsqrtpd_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # SQRTPD 4-632 PAGE 1752 LINE 91013 define pcodeop vsqrtpd_avx512f ; -:VSQRTPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x51; ZmmReg1 ... 
& ZmmReg2_m512_m64bcst +:VSQRTPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x51; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vsqrtpd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmResult = vsqrtpd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # SQRTPS 4-635 PAGE 1755 LINE 91139 define pcodeop vsqrtps_avx512vl ; -:VSQRTPS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x51; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VSQRTPS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x51; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vsqrtps_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vsqrtps_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # SQRTPS 4-635 PAGE 1755 LINE 91142 -:VSQRTPS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x51; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VSQRTPS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x51; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vsqrtps_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vsqrtps_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # SQRTPS 4-635 PAGE 1755 LINE 91145 define pcodeop vsqrtps_avx512f ; -:VSQRTPS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x51; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VSQRTPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x51; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vsqrtps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vsqrtps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # SQRTSD 4-638 PAGE 1758 LINE 91276 define pcodeop vsqrtsd_avx512f ; -:VSQRTSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VSQRTSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vsqrtsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vsqrtsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # SQRTSS 4-640 PAGE 1760 LINE 91371 define pcodeop vsqrtss_avx512f ; -:VSQRTSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m32 +:VSQRTSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vsqrtss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vsqrtss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # SUBPD 4-656 PAGE 1776 LINE 92120 define pcodeop vsubpd_avx512vl ; -:VSUBPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VSUBPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vsubpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vsubpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # SUBPD 4-656 PAGE 1776 LINE 92123 -:VSUBPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x5C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VSUBPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x5C; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vsubpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vsubpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # SUBPD 4-656 PAGE 1776 LINE 92126 define pcodeop vsubpd_avx512f ; -:VSUBPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x5C; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VSUBPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x5C; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vsubpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vsubpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # SUBPS 4-659 PAGE 1779 LINE 92269 define pcodeop vsubps_avx512vl ; -:VSUBPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VSUBPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vsubps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vsubps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # SUBPS 4-659 PAGE 1779 LINE 92272 -:VSUBPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x5C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VSUBPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x5C; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vsubps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vsubps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # SUBPS 4-659 PAGE 1779 LINE 92275 define pcodeop vsubps_avx512f ; -:VSUBPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x5C; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VSUBPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x5C; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vsubps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vsubps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # SUBSD 4-662 PAGE 1782 LINE 92421 define pcodeop vsubsd_avx512f ; -:VSUBSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VSUBSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vsubsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vsubsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # SUBSS 4-664 PAGE 1784 LINE 92514 define pcodeop vsubss_avx512f ; -:VSUBSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VSUBSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x5C; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vsubss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vsubss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # UCOMISD 4-683 PAGE 1803 LINE 93424 define pcodeop vucomisd_avx512f ; -:VUCOMISD XmmReg1, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VUCOMISD XmmReg1, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x2E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vucomisd_avx512f( XmmReg2_m64 ); @@ -5172,7 +6126,7 @@ define pcodeop vucomisd_avx512f ; # UCOMISS 4-685 PAGE 1805 LINE 93507 define pcodeop vucomiss_avx512f ; -:VUCOMISS XmmReg1, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VUCOMISS XmmReg1, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x2E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vucomiss_avx512f( XmmReg2_m32 ); @@ -5182,323 +6136,412 @@ define pcodeop vucomiss_avx512f ; # UNPCKHPD 4-688 PAGE 1808 LINE 93629 define pcodeop vunpckhpd_avx512vl ; -:VUNPCKHPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VUNPCKHPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vunpckhpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vunpckhpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # UNPCKHPD 4-688 PAGE 1808 LINE 93632 -:VUNPCKHPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VUNPCKHPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vunpckhpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vunpckhpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # UNPCKHPD 4-688 PAGE 1808 LINE 93635 define pcodeop vunpckhpd_avx512f ; -:VUNPCKHPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x15; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VUNPCKHPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x15; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vunpckhpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vunpckhpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # UNPCKHPS 4-692 PAGE 1812 LINE 93813 define pcodeop vunpckhps_avx512vl ; -:VUNPCKHPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VUNPCKHPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vunpckhps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vunpckhps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # UNPCKHPS 4-692 PAGE 1812 LINE 93817 -:VUNPCKHPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VUNPCKHPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vunpckhps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vunpckhps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # UNPCKHPS 4-692 PAGE 1812 LINE 93821 define pcodeop vunpckhps_avx512f ; -:VUNPCKHPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x15; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VUNPCKHPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x15; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vunpckhps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vunpckhps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # UNPCKLPD 4-696 PAGE 1816 LINE 94045 define pcodeop vunpcklpd_avx512vl ; -:VUNPCKLPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VUNPCKLPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vunpcklpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vunpcklpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # UNPCKLPD 4-696 PAGE 1816 LINE 94048 -:VUNPCKLPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VUNPCKLPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vunpcklpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vunpcklpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # UNPCKLPD 4-696 PAGE 1816 LINE 94051 define pcodeop vunpcklpd_avx512f ; -:VUNPCKLPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x14; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VUNPCKLPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x14; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vunpcklpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vunpcklpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # UNPCKLPS 4-700 PAGE 1820 LINE 94231 define pcodeop vunpcklps_avx512vl ; -:VUNPCKLPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VUNPCKLPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vunpcklps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vunpcklps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # UNPCKLPS 4-700 PAGE 1820 LINE 94234 -:VUNPCKLPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VUNPCKLPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vunpcklps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vunpcklps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # UNPCKLPS 4-700 PAGE 1820 LINE 94237 define pcodeop vunpcklps_avx512f ; -:VUNPCKLPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x14; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VUNPCKLPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x14; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vunpcklps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vunpcklps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VALIGND/VALIGNQ 5-5 PAGE 1829 LINE 94615 define pcodeop valignd_avx512vl ; -:VALIGND XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x03; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VALIGND XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x03; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = valignd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = valignd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VALIGND/VALIGNQ 5-5 PAGE 1829 LINE 94619 define pcodeop valignq_avx512vl ; -:VALIGNQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x03; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VALIGNQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x03; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = valignq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = valignq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VALIGND/VALIGNQ 5-5 PAGE 1829 LINE 94623 -:VALIGND YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x03; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VALIGND YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x03; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = valignd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = valignd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VALIGND/VALIGNQ 5-5 PAGE 1829 LINE 94627 -:VALIGNQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x03; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VALIGNQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x03; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = valignq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = valignq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VALIGND/VALIGNQ 5-5 PAGE 1829 LINE 94631 define pcodeop valignd_avx512f ; -:VALIGND ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x03; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VALIGND ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x03; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = valignd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = valignd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VALIGND/VALIGNQ 5-5 PAGE 1829 LINE 94635 define pcodeop valignq_avx512f ; -:VALIGNQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x03; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VALIGNQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x03; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = valignq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = valignq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VBLENDMPD/VBLENDMPS 5-9 PAGE 1833 LINE 94787 define pcodeop vblendmpd_avx512vl ; -:VBLENDMPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x65; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VBLENDMPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x65; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vblendmpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vblendmpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VBLENDMPD/VBLENDMPS 5-9 PAGE 1833 LINE 94790 -:VBLENDMPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x65; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VBLENDMPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x65; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vblendmpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vblendmpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VBLENDMPD/VBLENDMPS 5-9 PAGE 1833 LINE 94793 define pcodeop vblendmpd_avx512f ; -:VBLENDMPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x65; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VBLENDMPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x65; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vblendmpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vblendmpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VBLENDMPD/VBLENDMPS 5-9 PAGE 1833 LINE 94796 define pcodeop vblendmps_avx512vl ; -:VBLENDMPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x65; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VBLENDMPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x65; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vblendmps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vblendmps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VBLENDMPD/VBLENDMPS 5-9 PAGE 1833 LINE 94799 -:VBLENDMPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x65; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VBLENDMPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x65; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vblendmps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vblendmps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VBLENDMPD/VBLENDMPS 5-9 PAGE 1833 LINE 94802 define pcodeop vblendmps_avx512f ; -:VBLENDMPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x65; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VBLENDMPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x65; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vblendmps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vblendmps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94917 define pcodeop vbroadcastsd_avx512vl ; -:VBROADCASTSD YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x19; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VBROADCASTSD YmmReg1 YmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x19; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcastsd_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcastsd_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VBROADCAST 5-12 PAGE 1836 LINE 94920 define pcodeop vbroadcastsd_avx512f ; -:VBROADCASTSD ZmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x19; ZmmReg1 ... & XmmReg2_m64 +:VBROADCASTSD ZmmReg1 ZmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x19; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastsd_avx512f( XmmReg2_m64 ); + ZmmResult = vbroadcastsd_avx512f( XmmReg2_m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94923 define pcodeop vbroadcastf32x2_avx512vl ; -:VBROADCASTF32X2 YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x19; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VBROADCASTF32X2 YmmReg1 YmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x19; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcastf32x2_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcastf32x2_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VBROADCAST 5-12 PAGE 1836 LINE 94926 define pcodeop vbroadcastf32x2_avx512dq ; -:VBROADCASTF32X2 ZmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x19; ZmmReg1 ... 
& XmmReg2_m64 +:VBROADCASTF32X2 ZmmReg1 ZmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x19; (ZmmReg1 & ZmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastf32x2_avx512dq( XmmReg2_m64 ); + ZmmResult = vbroadcastf32x2_avx512dq( XmmReg2_m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94929 define pcodeop vbroadcastss_avx512vl ; -:VBROADCASTSS XmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x18; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VBROADCASTSS XmmReg1 XmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x18; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:16 = vbroadcastss_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vbroadcastss_avx512vl( XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VBROADCAST 5-12 PAGE 1836 LINE 94932 -:VBROADCASTSS YmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x18; (YmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VBROADCASTSS YmmReg1 YmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x18; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcastss_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcastss_avx512vl( XmmReg2_m32 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VBROADCAST 5-12 PAGE 1836 LINE 94935 define pcodeop vbroadcastss_avx512f ; -:VBROADCASTSS ZmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x18; ZmmReg1 ... & XmmReg2_m32 +:VBROADCASTSS ZmmReg1 ZmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x18; (ZmmReg1 & ZmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastss_avx512f( XmmReg2_m32 ); + ZmmResult = vbroadcastss_avx512f( XmmReg2_m32 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94938 define pcodeop vbroadcastf32x4_avx512vl ; -:VBROADCASTF32X4 YmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x1A; (YmmReg1 & ZmmReg1) ... & m128 +:VBROADCASTF32X4 YmmReg1 YmmOpMask32, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1A; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcastf32x4_avx512vl( m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcastf32x4_avx512vl( m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VBROADCAST 5-12 PAGE 1836 LINE 94941 define pcodeop vbroadcastf32x4_avx512f ; -:VBROADCASTF32X4 ZmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x1A; ZmmReg1 ... 
& m128 +:VBROADCASTF32X4 ZmmReg1 ZmmOpMask32, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1A; (ZmmReg1 & ZmmOpMask32) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastf32x4_avx512f( m128 ); + ZmmResult = vbroadcastf32x4_avx512f( m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94944 define pcodeop vbroadcastf64x2_avx512vl ; -:VBROADCASTF64X2 YmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x1A; (YmmReg1 & ZmmReg1) ... & m128 +:VBROADCASTF64X2 YmmReg1 YmmOpMask64, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x1A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcastf64x2_avx512vl( m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcastf64x2_avx512vl( m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VBROADCAST 5-12 PAGE 1836 LINE 94947 define pcodeop vbroadcastf64x2_avx512dq ; -:VBROADCASTF64X2 ZmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x1A; ZmmReg1 ... & m128 +:VBROADCASTF64X2 ZmmReg1 ZmmOpMask64, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x1A; (ZmmReg1 & ZmmOpMask64) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastf64x2_avx512dq( m128 ); + ZmmResult = vbroadcastf64x2_avx512dq( m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94950 define pcodeop vbroadcastf32x8_avx512dq ; -:VBROADCASTF32X8 ZmmReg1^KWriteMask, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x1B; ZmmReg1 ... 
& m256 +:VBROADCASTF32X8 ZmmReg1 ZmmOpMask32, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x1B; (ZmmReg1 & ZmmOpMask32) ... & m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastf32x8_avx512dq( m256 ); + ZmmResult = vbroadcastf32x8_avx512dq( m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VBROADCAST 5-12 PAGE 1836 LINE 94953 define pcodeop vbroadcastf64x4_avx512f ; -:VBROADCASTF64X4 ZmmReg1^KWriteMask, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x1B; ZmmReg1 ... & m256 +:VBROADCASTF64X4 ZmmReg1 ZmmOpMask64, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x1B; (ZmmReg1 & ZmmOpMask64) ... & m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcastf64x4_avx512f( m256 ); + ZmmResult = vbroadcastf64x4_avx512f( m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPBROADCASTM 5-19 PAGE 1843 LINE 95303 define pcodeop vpbroadcastmb2q_avx512vl ; -:VPBROADCASTMB2Q XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x2A; (XmmReg1 & ZmmReg1) & KReg_rm +:VPBROADCASTMB2Q XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x2A; (XmmReg1 & ZmmReg1) & KReg_rm { local tmp:16 = vpbroadcastmb2q_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); } # VPBROADCASTM 5-19 PAGE 1843 LINE 95305 -:VPBROADCASTMB2Q YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x2A; (YmmReg1 & ZmmReg1) & KReg_rm +:VPBROADCASTMB2Q YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x2A; (YmmReg1 & ZmmReg1) & KReg_rm { local tmp:32 = vpbroadcastmb2q_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); @@ -5506,21 +6549,21 @@ define pcodeop vpbroadcastmb2q_avx512vl ; # VPBROADCASTM 5-19 PAGE 1843 LINE 
95307 define pcodeop vpbroadcastmb2q_avx512cd ; -:VPBROADCASTMB2Q ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x2A; ZmmReg1 & KReg_rm +:VPBROADCASTMB2Q ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x2A; ZmmReg1 & KReg_rm { ZmmReg1 = vpbroadcastmb2q_avx512cd( KReg_rm ); } # VPBROADCASTM 5-19 PAGE 1843 LINE 95309 define pcodeop vpbroadcastmw2d_avx512vl ; -:VPBROADCASTMW2D XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x3A; (XmmReg1 & ZmmReg1) & KReg_rm +:VPBROADCASTMW2D XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x3A; (XmmReg1 & ZmmReg1) & KReg_rm { local tmp:16 = vpbroadcastmw2d_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); } # VPBROADCASTM 5-19 PAGE 1843 LINE 95311 -:VPBROADCASTMW2D YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x3A; (YmmReg1 & ZmmReg1) & KReg_rm +:VPBROADCASTMW2D YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x3A; (YmmReg1 & ZmmReg1) & KReg_rm { local tmp:32 = vpbroadcastmw2d_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); @@ -5528,156 +6571,255 @@ define pcodeop vpbroadcastmw2d_avx512vl ; # VPBROADCASTM 5-19 PAGE 1843 LINE 95313 define pcodeop vpbroadcastmw2d_avx512cd ; -:VPBROADCASTMW2D ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x3A; ZmmReg1 & KReg_rm +:VPBROADCASTMW2D ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x3A; ZmmReg1 & KReg_rm { ZmmReg1 = vpbroadcastmw2d_avx512cd( KReg_rm ); } # VCOMPRESSPD 5-21 PAGE 1845 LINE 95380 define pcodeop vcompresspd_avx512vl ; -:VCOMPRESSPD XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x8A; XmmReg1 ... 
& XmmReg2_m128 +:VCOMPRESSPD XmmReg2 XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0x8A; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - XmmReg2_m128 = vcompresspd_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask; + XmmResult = vcompresspd_avx512vl( XmmReg1, XmmOpMask ); + ZmmReg2 = zext(XmmResult); +} + +:VCOMPRESSPD m128 XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0x8A; XmmReg1 ... & m128 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + XmmMask = m128; + build XmmOpMask; + XmmResult = vcompresspd_avx512vl( XmmReg1, XmmOpMask ); + m128 = XmmResult; } # VCOMPRESSPD 5-21 PAGE 1845 LINE 95383 -:VCOMPRESSPD YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x8A; YmmReg1 ... & YmmReg2_m256 +:VCOMPRESSPD YmmReg2 YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0x8A; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - YmmReg2_m256 = vcompresspd_avx512vl( YmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask; + YmmResult = vcompresspd_avx512vl( YmmReg1, YmmOpMask ); + ZmmReg2 = zext(YmmResult); +} + +:VCOMPRESSPD m256 YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0x8A; YmmReg1 ... & m256 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + YmmMask = m256; + build YmmOpMask; + YmmResult = vcompresspd_avx512vl( YmmReg1, YmmOpMask ); + m256 = YmmResult; } # VCOMPRESSPD 5-21 PAGE 1845 LINE 95386 define pcodeop vcompresspd_avx512f ; -:VCOMPRESSPD ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x8A; ZmmReg1 ... 
& ZmmReg2_m512 +:VCOMPRESSPD ZmmReg2 ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0x8A; ZmmReg1 & mod=3 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg2_m512 = vcompresspd_avx512f( ZmmReg1 ); + ZmmMask = ZmmReg2; + build ZmmOpMask; + ZmmResult = vcompresspd_avx512f( ZmmReg1, ZmmOpMask ); + ZmmReg2 = ZmmResult; +} + +:VCOMPRESSPD m512 ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0x8A; ZmmReg1 ... & m512 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + ZmmMask = m512; + build ZmmOpMask; + ZmmResult = vcompresspd_avx512f( ZmmReg1, ZmmOpMask ); + m512 = ZmmResult; } # VCOMPRESSPS 5-23 PAGE 1847 LINE 95481 define pcodeop vcompressps_avx512vl ; -:VCOMPRESSPS XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x8A; XmmReg1 ... & XmmReg2_m128 +:VCOMPRESSPS XmmReg2 XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0x8A; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - XmmReg2_m128 = vcompressps_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask; + XmmResult = vcompressps_avx512vl( XmmReg1, XmmOpMask ); + ZmmReg2 = zext(XmmResult); +} + +:VCOMPRESSPS m128 XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0x8A; XmmReg1 ... & m128 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + XmmMask = m128; + build XmmOpMask; + XmmResult = vcompressps_avx512vl( XmmReg1, XmmOpMask ); + m128 = XmmResult; } # VCOMPRESSPS 5-23 PAGE 1847 LINE 95484 -:VCOMPRESSPS YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x8A; YmmReg1 ... 
& YmmReg2_m256 +:VCOMPRESSPS YmmReg2 YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask; byte=0x8A; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - YmmReg2_m256 = vcompressps_avx512vl( YmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask; + YmmResult = vcompressps_avx512vl( YmmReg1, YmmOpMask ); + ZmmReg2 = zext(YmmResult); +} + +:VCOMPRESSPS m256 YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask; byte=0x8A; YmmReg1 ... & m256 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + YmmMask = m256; + build YmmOpMask; + YmmResult = vcompressps_avx512vl( YmmReg1, YmmOpMask ); + m256 = YmmResult; } # VCOMPRESSPS 5-23 PAGE 1847 LINE 95487 define pcodeop vcompressps_avx512f ; -:VCOMPRESSPS ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x8A; ZmmReg1 ... & ZmmReg2_m512 +:VCOMPRESSPS ZmmReg2 ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0x8A; ZmmReg1 & mod=3 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg2_m512 = vcompressps_avx512f( ZmmReg1 ); + ZmmMask = ZmmReg2; + build ZmmOpMask; + ZmmResult = vcompressps_avx512f( ZmmReg1, ZmmOpMask ); + ZmmReg2 = ZmmResult; +} + +:VCOMPRESSPS m512 ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0x8A; ZmmReg1 ... & m512 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + ZmmMask = m512; + build ZmmOpMask; + ZmmResult = vcompressps_avx512f( ZmmReg1, ZmmOpMask ); + m512 = ZmmResult; } # VCVTPD2QQ 5-25 PAGE 1849 LINE 95583 define pcodeop vcvtpd2qq_avx512vl ; -:VCVTPD2QQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7B; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128_m64bcst +:VCVTPD2QQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x7B; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2qq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2qq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTPD2QQ 5-25 PAGE 1849 LINE 95586 -:VCVTPD2QQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7B; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTPD2QQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x7B; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtpd2qq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtpd2qq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTPD2QQ 5-25 PAGE 1849 LINE 95589 define pcodeop vcvtpd2qq_avx512dq ; -:VCVTPD2QQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7B; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VCVTPD2QQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x7B; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtpd2qq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmResult = vcvtpd2qq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTPD2UDQ 5-28 PAGE 1852 LINE 95706 define pcodeop vcvtpd2udq_avx512vl ; -:VCVTPD2UDQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x79; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTPD2UDQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x79; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2udq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2udq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTPD2UDQ 5-28 PAGE 1852 LINE 95709 -:VCVTPD2UDQ XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x79; (XmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTPD2UDQ XmmReg1 XmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x79; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2udq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2udq_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTPD2UDQ 5-28 PAGE 1852 LINE 95712 define pcodeop vcvtpd2udq_avx512f ; -:VCVTPD2UDQ YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x79; (YmmReg1 & ZmmReg1) ... 
& ZmmReg2_m512_m64bcst +:VCVTPD2UDQ YmmReg1 YmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x79; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtpd2udq_avx512f( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtpd2udq_avx512f( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTPD2UQQ 5-31 PAGE 1855 LINE 95833 define pcodeop vcvtpd2uqq_avx512vl ; -:VCVTPD2UQQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x79; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTPD2UQQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x79; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtpd2uqq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtpd2uqq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTPD2UQQ 5-31 PAGE 1855 LINE 95836 -:VCVTPD2UQQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x79; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTPD2UQQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x79; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtpd2uqq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtpd2uqq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTPD2UQQ 5-31 PAGE 1855 LINE 95839 define pcodeop vcvtpd2uqq_avx512dq ; -:VCVTPD2UQQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x79; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VCVTPD2UQQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x79; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtpd2uqq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmResult = vcvtpd2uqq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTPH2PS 5-34 PAGE 1858 LINE 95963 define pcodeop vcvtph2ps_avx512vl ; -:VCVTPH2PS XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VCVTPH2PS XmmReg1 XmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x13; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - local tmp:16 = vcvtph2ps_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtph2ps_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTPH2PS 5-34 PAGE 1858 LINE 95966 -:VCVTPH2PS YmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; (YmmReg1 & ZmmReg1) ... 
& XmmReg2_m128 +:VCVTPH2PS YmmReg1 YmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x13; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - local tmp:32 = vcvtph2ps_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtph2ps_avx512vl( XmmReg2_m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VCVTPH2PS 5-34 PAGE 1858 LINE 95969 define pcodeop vcvtph2ps_avx512f ; -:VCVTPH2PS ZmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; ZmmReg1 ... & YmmReg2_m256 +:VCVTPH2PS ZmmReg1 ZmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x13; (ZmmReg1 & ZmmOpMask32) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - ZmmReg1 = vcvtph2ps_avx512f( YmmReg2_m256 ); + ZmmResult = vcvtph2ps_avx512f( YmmReg2_m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VCVTPS2PH 5-37 PAGE 1861 LINE 96116 @@ -5691,133 +6833,167 @@ define pcodeop vcvtph2ps_avx512f ; # VCVTPS2UDQ 5-41 PAGE 1865 LINE 96305 define pcodeop vcvtps2udq_avx512vl ; -:VCVTPS2UDQ XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x79; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTPS2UDQ XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x79; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtps2udq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtps2udq_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTPS2UDQ 5-41 PAGE 1865 LINE 96309 -:VCVTPS2UDQ YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x79; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VCVTPS2UDQ YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x79; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtps2udq_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtps2udq_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VCVTPS2UDQ 5-41 PAGE 1865 LINE 96313 define pcodeop vcvtps2udq_avx512f ; -:VCVTPS2UDQ ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x79; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VCVTPS2UDQ ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x79; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtps2udq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vcvtps2udq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VCVTPS2QQ 5-44 PAGE 1868 LINE 96434 define pcodeop vcvtps2qq_avx512vl ; -:VCVTPS2QQ XmmReg1^KWriteMask, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7B; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m64_m32bcst +:VCVTPS2QQ XmmReg1 XmmOpMask64, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7B; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvtps2qq_avx512vl( XmmReg2_m64_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtps2qq_avx512vl( XmmReg2_m64_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTPS2QQ 5-44 PAGE 1868 LINE 96437 -:VCVTPS2QQ YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7B; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTPS2QQ YmmReg1 YmmOpMask64, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7B; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvtps2qq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtps2qq_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTPS2QQ 5-44 PAGE 1868 LINE 96440 define pcodeop vcvtps2qq_avx512dq ; -:VCVTPS2QQ ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7B; ZmmReg1 ... & YmmReg2_m256_m32bcst +:VCVTPS2QQ ZmmReg1 ZmmOpMask64, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7B; (ZmmReg1 & ZmmOpMask64) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvtps2qq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmResult = vcvtps2qq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTPS2UQQ 5-47 PAGE 1871 LINE 96560 define pcodeop vcvtps2uqq_avx512vl ; -:VCVTPS2UQQ XmmReg1^KWriteMask, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x79; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64_m32bcst +:VCVTPS2UQQ XmmReg1 XmmOpMask64, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x79; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvtps2uqq_avx512vl( XmmReg2_m64_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtps2uqq_avx512vl( XmmReg2_m64_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTPS2UQQ 5-47 PAGE 1871 LINE 96563 -:VCVTPS2UQQ YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x79; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTPS2UQQ YmmReg1 YmmOpMask64, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x79; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvtps2uqq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtps2uqq_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTPS2UQQ 5-47 PAGE 1871 LINE 96566 define pcodeop vcvtps2uqq_avx512dq ; -:VCVTPS2UQQ ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x79; ZmmReg1 ... 
& YmmReg2_m256_m32bcst +:VCVTPS2UQQ ZmmReg1 ZmmOpMask64, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x79; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvtps2uqq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmResult = vcvtps2uqq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTQQ2PD 5-50 PAGE 1874 LINE 96686 define pcodeop vcvtqq2pd_avx512vl ; -:VCVTQQ2PD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTQQ2PD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtqq2pd_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtqq2pd_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTQQ2PD 5-50 PAGE 1874 LINE 96689 -:VCVTQQ2PD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTQQ2PD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtqq2pd_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtqq2pd_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTQQ2PD 5-50 PAGE 1874 LINE 96692 define pcodeop vcvtqq2pd_avx512dq ; -:VCVTQQ2PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0xE6; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VCVTQQ2PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0xE6; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtqq2pd_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmResult = vcvtqq2pd_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTQQ2PS 5-52 PAGE 1876 LINE 96797 define pcodeop vcvtqq2ps_avx512vl ; -:VCVTQQ2PS XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x5B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTQQ2PS XmmReg1 XmmOpMask32, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x5B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtqq2ps_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtqq2ps_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTQQ2PS 5-52 PAGE 1876 LINE 96800 -:VCVTQQ2PS XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x5B; (XmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst +:VCVTQQ2PS XmmReg1 XmmOpMask32, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x5B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtqq2ps_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtqq2ps_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTQQ2PS 5-52 PAGE 1876 LINE 96803 define pcodeop vcvtqq2ps_avx512dq ; -:VCVTQQ2PS YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x5B; (YmmReg1 & ZmmReg1) ... & ZmmReg2_m512_m64bcst +:VCVTQQ2PS YmmReg1 YmmOpMask32, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x5B; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtqq2ps_avx512dq( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtqq2ps_avx512dq( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VCVTSD2USI 5-54 PAGE 1878 LINE 96907 define pcodeop vcvtsd2usi_avx512f ; -:VCVTSD2USI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x79; Reg32 ... & XmmReg2_m64 +:VCVTSD2USI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x79; Reg32 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvtsd2usi_avx512f( XmmReg2_m64 ); @@ -5826,7 +7002,7 @@ define pcodeop vcvtsd2usi_avx512f ; # VCVTSD2USI 5-54 PAGE 1878 LINE 96909 @ifdef IA64 -:VCVTSD2USI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x79; Reg64 ... 
& XmmReg2_m64 +:VCVTSD2USI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x79; Reg64 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvtsd2usi_avx512f( XmmReg2_m64 ); @@ -5835,7 +7011,7 @@ define pcodeop vcvtsd2usi_avx512f ; # VCVTSS2USI 5-55 PAGE 1879 LINE 96967 define pcodeop vcvtss2usi_avx512f ; -:VCVTSS2USI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x79; Reg32 ... & XmmReg2_m32 +:VCVTSS2USI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x79; Reg32 ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvtss2usi_avx512f( XmmReg2_m32 ); @@ -5844,7 +7020,7 @@ define pcodeop vcvtss2usi_avx512f ; # VCVTSS2USI 5-55 PAGE 1879 LINE 96969 @ifdef IA64 -:VCVTSS2USI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x79; Reg64 ... & XmmReg2_m32 +:VCVTSS2USI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x79; Reg64 ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvtss2usi_avx512f( XmmReg2_m32 ); @@ -5853,158 +7029,199 @@ define pcodeop vcvtss2usi_avx512f ; # VCVTTPD2QQ 5-57 PAGE 1881 LINE 97040 define pcodeop vcvttpd2qq_avx512vl ; -:VCVTTPD2QQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTTPD2QQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttpd2qq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttpd2qq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTTPD2QQ 5-57 PAGE 1881 LINE 97043 -:VCVTTPD2QQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTTPD2QQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvttpd2qq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttpd2qq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTTPD2QQ 5-57 PAGE 1881 LINE 97046 define pcodeop vcvttpd2qq_avx512dq ; -:VCVTTPD2QQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VCVTTPD2QQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvttpd2qq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmResult = vcvttpd2qq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTTPD2UDQ 5-59 PAGE 1883 LINE 97147 define pcodeop vcvttpd2udq_avx512vl ; -:VCVTTPD2UDQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x78; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m128_m64bcst +:VCVTTPD2UDQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x78; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttpd2udq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttpd2udq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTTPD2UDQ 5-59 PAGE 1883 LINE 97152 -:VCVTTPD2UDQ XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x78; (XmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTTPD2UDQ XmmReg1 XmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x78; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttpd2udq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttpd2udq_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTTPD2UDQ 5-59 PAGE 1883 LINE 97156 define pcodeop vcvttpd2udq_avx512f ; -:VCVTTPD2UDQ YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x78; (YmmReg1 & ZmmReg1) ... & ZmmReg2_m512_m64bcst +:VCVTTPD2UDQ YmmReg1 YmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) ; byte=0x78; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvttpd2udq_avx512f( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttpd2udq_avx512f( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTTPD2UQQ 5-62 PAGE 1886 LINE 97272 define pcodeop vcvttpd2uqq_avx512vl ; -:VCVTTPD2UQQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x78; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTTPD2UQQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x78; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttpd2uqq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttpd2uqq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTTPD2UQQ 5-62 PAGE 1886 LINE 97276 -:VCVTTPD2UQQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x78; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTTPD2UQQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x78; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvttpd2uqq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttpd2uqq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTTPD2UQQ 5-62 PAGE 1886 LINE 97280 define pcodeop vcvttpd2uqq_avx512dq ; -:VCVTTPD2UQQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x78; ZmmReg1 ... 
& ZmmReg2_m512_m64bcst +:VCVTTPD2UQQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) ; byte=0x78; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvttpd2uqq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmResult = vcvttpd2uqq_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTTPS2UDQ 5-64 PAGE 1888 LINE 97385 define pcodeop vcvttps2udq_avx512vl ; -:VCVTTPS2UDQ XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x78; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTTPS2UDQ XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x78; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvttps2udq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttps2udq_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTTPS2UDQ 5-64 PAGE 1888 LINE 97389 -:VCVTTPS2UDQ YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x78; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VCVTTPS2UDQ YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x78; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvttps2udq_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttps2udq_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VCVTTPS2UDQ 5-64 PAGE 1888 LINE 97393 define pcodeop vcvttps2udq_avx512f ; -:VCVTTPS2UDQ ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x78; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VCVTTPS2UDQ ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x78; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvttps2udq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vcvttps2udq_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VCVTTPS2QQ 5-66 PAGE 1890 LINE 97497 define pcodeop vcvttps2qq_avx512vl ; -:VCVTTPS2QQ XmmReg1^KWriteMask, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64_m32bcst +:VCVTTPS2QQ XmmReg1 XmmOpMask64, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvttps2qq_avx512vl( XmmReg2_m64_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttps2qq_avx512vl( XmmReg2_m64_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTTPS2QQ 5-66 PAGE 1890 LINE 97500 -:VCVTTPS2QQ YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) ... 
& XmmReg2_m128_m32bcst +:VCVTTPS2QQ YmmReg1 YmmOpMask64, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvttps2qq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttps2qq_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTTPS2QQ 5-66 PAGE 1890 LINE 97503 define pcodeop vcvttps2qq_avx512dq ; -:VCVTTPS2QQ ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; ZmmReg1 ... & YmmReg2_m256_m32bcst +:VCVTTPS2QQ ZmmReg1 ZmmOpMask64, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvttps2qq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmResult = vcvttps2qq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTTPS2UQQ 5-68 PAGE 1892 LINE 97608 define pcodeop vcvttps2uqq_avx512vl ; -:VCVTTPS2UQQ XmmReg1^KWriteMask, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x78; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64_m32bcst +:VCVTTPS2UQQ XmmReg1 XmmOpMask64, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x78; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvttps2uqq_avx512vl( XmmReg2_m64_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvttps2uqq_avx512vl( XmmReg2_m64_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTTPS2UQQ 5-68 PAGE 1892 LINE 97611 -:VCVTTPS2UQQ YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x78; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTTPS2UQQ YmmReg1 YmmOpMask64, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x78; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvttps2uqq_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvttps2uqq_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTTPS2UQQ 5-68 PAGE 1892 LINE 97615 define pcodeop vcvttps2uqq_avx512dq ; -:VCVTTPS2UQQ ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x78; ZmmReg1 ... & YmmReg2_m256_m32bcst +:VCVTTPS2UQQ ZmmReg1 ZmmOpMask64, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x78; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvttps2uqq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmResult = vcvttps2uqq_avx512dq( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTTSD2USI 5-70 PAGE 1894 LINE 97722 define pcodeop vcvttsd2usi_avx512f ; -:VCVTTSD2USI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x78; Reg32 ... 
& XmmReg2_m64 +:VCVTTSD2USI Reg32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x78; Reg32 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvttsd2usi_avx512f( XmmReg2_m64 ); @@ -6013,7 +7230,7 @@ define pcodeop vcvttsd2usi_avx512f ; # VCVTTSD2USI 5-70 PAGE 1894 LINE 97725 @ifdef IA64 -:VCVTTSD2USI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x78; Reg64 ... & XmmReg2_m64 +:VCVTTSD2USI Reg64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x78; Reg64 ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvttsd2usi_avx512f( XmmReg2_m64 ); @@ -6022,7 +7239,7 @@ define pcodeop vcvttsd2usi_avx512f ; # VCVTTSS2USI 5-71 PAGE 1895 LINE 97782 define pcodeop vcvttss2usi_avx512f ; -:VCVTTSS2USI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x78; Reg32 ... & XmmReg2_m32 +:VCVTTSS2USI Reg32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x78; Reg32 ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg32 = vcvttss2usi_avx512f( XmmReg2_m32 ); @@ -6031,7 +7248,7 @@ define pcodeop vcvttss2usi_avx512f ; # VCVTTSS2USI 5-71 PAGE 1895 LINE 97785 @ifdef IA64 -:VCVTTSS2USI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x78; Reg64 ... & XmmReg2_m32 +:VCVTTSS2USI Reg64, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1); byte=0x78; Reg64 ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 5; ] # (TupleType T1F) { Reg64 = vcvttss2usi_avx512f( XmmReg2_m32 ); @@ -6040,108 +7257,135 @@ define pcodeop vcvttss2usi_avx512f ; # VCVTUDQ2PD 5-73 PAGE 1897 LINE 97852 define pcodeop vcvtudq2pd_avx512vl ; -:VCVTUDQ2PD XmmReg1^KWriteMask, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64_m32bcst +:VCVTUDQ2PD XmmReg1 XmmOpMask64, XmmReg2_m64_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:16 = vcvtudq2pd_avx512vl( XmmReg2_m64_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtudq2pd_avx512vl( XmmReg2_m64_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTUDQ2PD 5-73 PAGE 1897 LINE 97855 -:VCVTUDQ2PD YmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTUDQ2PD YmmReg1 YmmOpMask64, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - local tmp:32 = vcvtudq2pd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtudq2pd_avx512vl( XmmReg2_m128_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTUDQ2PD 5-73 PAGE 1897 LINE 97859 define pcodeop vcvtudq2pd_avx512f ; -:VCVTUDQ2PD ZmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; ZmmReg1 ... 
& YmmReg2_m256_m32bcst +:VCVTUDQ2PD ZmmReg1 ZmmOpMask64, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 1; ] # (TupleType HV) { - ZmmReg1 = vcvtudq2pd_avx512f( YmmReg2_m256_m32bcst ); + ZmmResult = vcvtudq2pd_avx512f( YmmReg2_m256_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTUDQ2PS 5-75 PAGE 1899 LINE 97962 define pcodeop vcvtudq2ps_avx512vl ; -:VCVTUDQ2PS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VCVTUDQ2PS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtudq2ps_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtudq2ps_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTUDQ2PS 5-75 PAGE 1899 LINE 97965 -:VCVTUDQ2PS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VCVTUDQ2PS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtudq2ps_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtudq2ps_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VCVTUDQ2PS 5-75 PAGE 1899 LINE 97968 define pcodeop vcvtudq2ps_avx512f ; -:VCVTUDQ2PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & KWriteMask; byte=0x7A; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VCVTUDQ2PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x7A; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtudq2ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vcvtudq2ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VCVTUQQ2PD 5-77 PAGE 1901 LINE 98078 define pcodeop vcvtuqq2pd_avx512vl ; -:VCVTUQQ2PD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTUQQ2PD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtuqq2pd_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtuqq2pd_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VCVTUQQ2PD 5-77 PAGE 1901 LINE 98081 -:VCVTUQQ2PD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst +:VCVTUQQ2PD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtuqq2pd_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtuqq2pd_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VCVTUQQ2PD 5-77 PAGE 1901 LINE 98084 define pcodeop vcvtuqq2pd_avx512dq ; -:VCVTUQQ2PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VCVTUQQ2PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vcvtuqq2pd_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmResult = vcvtuqq2pd_avx512dq( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VCVTUQQ2PS 5-79 PAGE 1903 LINE 98193 define pcodeop vcvtuqq2ps_avx512vl ; -:VCVTUQQ2PS XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VCVTUQQ2PS XmmReg1 XmmOpMask32, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtuqq2ps_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtuqq2ps_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTUQQ2PS 5-79 PAGE 1903 LINE 98196 -:VCVTUQQ2PS XmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VCVTUQQ2PS XmmReg1 XmmOpMask32, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vcvtuqq2ps_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vcvtuqq2ps_avx512vl( YmmReg2_m256_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VCVTUQQ2PS 5-79 PAGE 1903 LINE 98199 define pcodeop vcvtuqq2ps_avx512dq ; -:VCVTUQQ2PS YmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) ... & ZmmReg2_m512_m64bcst +:VCVTUQQ2PS YmmReg1 YmmOpMask32, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) ; byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vcvtuqq2ps_avx512dq( ZmmReg2_m512_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vcvtuqq2ps_avx512dq( ZmmReg2_m512_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VCVTUSI2SD 5-81 PAGE 1905 LINE 98308 define pcodeop vcvtusi2sd_avx512f ; -:VCVTUSI2SD XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... 
& rm32 +:VCVTUSI2SD XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... & rm32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtusi2sd_avx512f( vexVVVV_XmmReg, rm32 ); @@ -6150,7 +7394,7 @@ define pcodeop vcvtusi2sd_avx512f ; # VCVTUSI2SD 5-81 PAGE 1905 LINE 98311 @ifdef IA64 -:VCVTUSI2SD XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... & rm64 +:VCVTUSI2SD XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... & rm64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtusi2sd_avx512f( vexVVVV_XmmReg, rm64 ); @@ -6160,7 +7404,7 @@ define pcodeop vcvtusi2sd_avx512f ; # VCVTUSI2SS 5-83 PAGE 1907 LINE 98381 define pcodeop vcvtusi2ss_avx512f ; -:VCVTUSI2SS XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... & rm32 +:VCVTUSI2SS XmmReg1, vexVVVV_XmmReg, rm32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... & rm32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtusi2ss_avx512f( vexVVVV_XmmReg, rm32 ); @@ -6169,7 +7413,7 @@ define pcodeop vcvtusi2ss_avx512f ; # VCVTUSI2SS 5-83 PAGE 1907 LINE 98383 @ifdef IA64 -:VCVTUSI2SS XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... & rm64 +:VCVTUSI2SS XmmReg1, vexVVVV_XmmReg, rm64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_F3) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x7B; (XmmReg1 & ZmmReg1) ... 
& rm64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vcvtusi2ss_avx512f( vexVVVV_XmmReg, rm64 ); @@ -6179,2181 +7423,2848 @@ define pcodeop vcvtusi2ss_avx512f ; # VDBPSADBW 5-85 PAGE 1909 LINE 98455 define pcodeop vdbpsadbw_avx512vl ; -:VDBPSADBW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x42; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128; imm8 +:VDBPSADBW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x42; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vdbpsadbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vdbpsadbw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VDBPSADBW 5-85 PAGE 1909 LINE 98460 -:VDBPSADBW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x42; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256; imm8 +:VDBPSADBW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x42; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vdbpsadbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vdbpsadbw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VDBPSADBW 5-85 PAGE 1909 LINE 98465 define pcodeop vdbpsadbw_avx512bw ; -:VDBPSADBW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x42; ZmmReg1 ... & ZmmReg2_m512; imm8 +:VDBPSADBW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x42; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vdbpsadbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512, imm8:1 ); + ZmmResult = vdbpsadbw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VEXPANDPD 5-89 PAGE 1913 LINE 98660 define pcodeop vexpandpd_avx512vl ; -:VEXPANDPD XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x88; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VEXPANDPD XmmReg1 XmmOpMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x88; (XmmReg1 & ZmmReg1 & XmmOpMask) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vexpandpd_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmMask = XmmReg1; + build XmmOpMask; + XmmResult = vexpandpd_avx512vl( XmmReg2_m128, XmmOpMask ); + ZmmReg1 = zext(XmmResult); } # VEXPANDPD 5-89 PAGE 1913 LINE 98663 -:VEXPANDPD YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x88; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256 +:VEXPANDPD YmmReg1 YmmOpMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x88; (YmmReg1 & ZmmReg1 & YmmOpMask) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vexpandpd_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmMask = YmmReg1; + build YmmOpMask; + YmmResult = vexpandpd_avx512vl( YmmReg2_m256, YmmOpMask ); + ZmmReg1 = zext(YmmResult); } # VEXPANDPD 5-89 PAGE 1913 LINE 98665 define pcodeop vexpandpd_avx512f ; -:VEXPANDPD ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x88; ZmmReg1 ... & ZmmReg2_m512 +:VEXPANDPD ZmmReg1 ZmmOpMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x88; (ZmmReg1 & ZmmOpMask) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vexpandpd_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask; + ZmmResult = vexpandpd_avx512f( ZmmReg2_m512, ZmmOpMask ); + ZmmReg1 = ZmmResult; } # VEXPANDPS 5-91 PAGE 1915 LINE 98748 define pcodeop vexpandps_avx512vl ; -:VEXPANDPS XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x88; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VEXPANDPS XmmReg1 XmmOpMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x88; (XmmReg1 & ZmmReg1 & XmmOpMask) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vexpandps_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmMask = XmmReg1; + build XmmOpMask; + XmmResult = vexpandps_avx512vl( XmmReg2_m128, XmmOpMask ); + ZmmReg1 = zext(XmmResult); } # VEXPANDPS 5-91 PAGE 1915 LINE 98750 -:VEXPANDPS YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x88; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256 +:VEXPANDPS YmmReg1 YmmOpMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x88; (YmmReg1 & ZmmReg1 & YmmOpMask) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vexpandps_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmMask = YmmReg1; + build YmmOpMask; + YmmResult = vexpandps_avx512vl( YmmReg2_m256, YmmOpMask ); + ZmmReg1 = zext(YmmResult); } # VEXPANDPS 5-91 PAGE 1915 LINE 98752 define pcodeop vexpandps_avx512f ; -:VEXPANDPS ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x88; ZmmReg1 ... & ZmmReg2_m512 +:VEXPANDPS ZmmReg1 ZmmOpMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x88; (ZmmReg1 & ZmmOpMask) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vexpandps_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask; + ZmmResult = vexpandps_avx512f( ZmmReg2_m512, ZmmOpMask ); + ZmmReg1 = ZmmResult; } # VEXP2PD 5-95 PAGE 1919 LINE 98936 define pcodeop vexp2pd_avx512er ; -:VEXP2PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC8; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VEXP2PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0xC8; (ZmmReg1 & ZmmOpMask64) ...
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vexp2pd_avx512er( ZmmReg1, ZmmReg2_m512_m64bcst ); + ZmmResult = vexp2pd_avx512er( ZmmReg1, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VEXP2PS 5-97 PAGE 1921 LINE 99019 define pcodeop vexp2ps_avx512er ; -:VEXP2PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC8; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VEXP2PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0xC8; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vexp2ps_avx512er( ZmmReg1, ZmmReg2_m512_m32bcst ); + ZmmResult = vexp2ps_avx512er( ZmmReg1, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VEXTRACTF128/VEXTRACTF32x4/VEXTRACTF64x2/VEXTRACTF32x8/VEXTRACTF64x4 5-99 PAGE 1923 LINE 99105 define pcodeop vextractf32x4_avx512vl ; -:VEXTRACTF32X4 XmmReg2_m128^KWriteMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x19; YmmReg1 ... & XmmReg2_m128; imm8 +:VEXTRACTF32X4 XmmReg2 XmmOpMask32, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x19; YmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextractf32x4_avx512vl( YmmReg1, imm8:1 ); + XmmResult = vextractf32x4_avx512vl( YmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); } +:VEXTRACTF32X4 m128 XmmOpMask32, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x19; YmmReg1 ... 
& m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextractf32x4_avx512vl( YmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; +} + + # VEXTRACTF128/VEXTRACTF32x4/VEXTRACTF64x2/VEXTRACTF32x8/VEXTRACTF64x4 5-99 PAGE 1923 LINE 99108 define pcodeop vextractf32x4_avx512f ; -:VEXTRACTF32x4 XmmReg2_m128^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x19; ZmmReg1 ... & XmmReg2_m128; imm8 +:VEXTRACTF32x4 XmmReg2 XmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x19; ZmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextractf32x4_avx512f( ZmmReg1, imm8:1 ); + XmmResult = vextractf32x4_avx512f( ZmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); +} + +:VEXTRACTF32x4 m128 XmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x19; ZmmReg1 ... & m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextractf32x4_avx512f( ZmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VEXTRACTF128/VEXTRACTF32x4/VEXTRACTF64x2/VEXTRACTF32x8/VEXTRACTF64x4 5-99 PAGE 1923 LINE 99111 define pcodeop vextractf64x2_avx512vl ; -:VEXTRACTF64X2 XmmReg2_m128^KWriteMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x19; YmmReg1 ... 
& XmmReg2_m128; imm8 +:VEXTRACTF64X2 XmmReg2 XmmOpMask64, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x19; YmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextractf64x2_avx512vl( YmmReg1, imm8:1 ); + XmmResult = vextractf64x2_avx512vl( YmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); } +:VEXTRACTF64X2 m128 XmmOpMask64, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x19; YmmReg1 ... & m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextractf64x2_avx512vl( YmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; +} + + # VEXTRACTF128/VEXTRACTF32x4/VEXTRACTF64x2/VEXTRACTF32x8/VEXTRACTF64x4 5-99 PAGE 1923 LINE 99114 define pcodeop vextractf64x2_avx512dq ; -:VEXTRACTF64X2 XmmReg2_m128^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x19; ZmmReg1 ... & XmmReg2_m128; imm8 +:VEXTRACTF64X2 XmmReg2 XmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x19; ZmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextractf64x2_avx512dq( ZmmReg1, imm8:1 ); + XmmResult = vextractf64x2_avx512dq( ZmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); +} + +:VEXTRACTF64X2 m128 XmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x19; ZmmReg1 ... 
& m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextractf64x2_avx512dq( ZmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; } # VEXTRACTF128/VEXTRACTF32x4/VEXTRACTF64x2/VEXTRACTF32x8/VEXTRACTF64x4 5-99 PAGE 1923 LINE 99117 define pcodeop vextractf32x8_avx512dq ; -:VEXTRACTF32X8 YmmReg2_m256^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1B; ZmmReg1 ... & YmmReg2_m256; imm8 +:VEXTRACTF32X8 YmmReg2 YmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & YmmOpMask32; byte=0x1B; ZmmReg1 & mod=3 & YmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - YmmReg2_m256 = vextractf32x8_avx512dq( ZmmReg1, imm8:1 ); + YmmResult = vextractf32x8_avx512dq( ZmmReg1, imm8:1 ); + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); +} + +:VEXTRACTF32X8 m256 YmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & YmmOpMask32; byte=0x1B; ZmmReg1 ... & m256; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + YmmResult = vextractf32x8_avx512dq( ZmmReg1, imm8:1 ); + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # VEXTRACTF128/VEXTRACTF32x4/VEXTRACTF64x2/VEXTRACTF32x8/VEXTRACTF64x4 5-99 PAGE 1923 LINE 99120 define pcodeop vextractf64x4_avx512f ; -:VEXTRACTF64x4 YmmReg2_m256^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x1B; ZmmReg1 ... 
& YmmReg2_m256; imm8 +:VEXTRACTF64x4 YmmReg2 YmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & YmmOpMask64; byte=0x1B; ZmmReg1 & mod=3 & YmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - YmmReg2_m256 = vextractf64x4_avx512f( ZmmReg1, imm8:1 ); + YmmResult = vextractf64x4_avx512f( ZmmReg1, imm8:1 ); + YmmMask = YmmReg2; + build YmmOpMask64; + ZmmReg2 = zext(YmmResult); +} + +:VEXTRACTF64x4 m256 YmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & YmmOpMask64; byte=0x1B; ZmmReg1 ... & m256; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + YmmResult = vextractf64x4_avx512f( ZmmReg1, imm8:1 ); + YmmMask = m256; + build YmmOpMask64; + m256 = YmmResult; } # VEXTRACTI128/VEXTRACTI32x4/VEXTRACTI64x2/VEXTRACTI32x8/VEXTRACTI64x4 5-106 PAGE 1930 LINE 99435 define pcodeop vextracti32x4_avx512vl ; -:VEXTRACTI32X4 XmmReg2_m128^KWriteMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x39; YmmReg1 ... & XmmReg2_m128; imm8 +:VEXTRACTI32X4 XmmReg2 XmmOpMask32, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x39; YmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextracti32x4_avx512vl( YmmReg1, imm8:1 ); + XmmResult = vextracti32x4_avx512vl( YmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); +} + +:VEXTRACTI32X4 m128 XmmOpMask32, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x39; YmmReg1 ... 
& m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextracti32x4_avx512vl( YmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VEXTRACTI128/VEXTRACTI32x4/VEXTRACTI64x2/VEXTRACTI32x8/VEXTRACTI64x4 5-106 PAGE 1930 LINE 99438 define pcodeop vextracti32x4_avx512f ; -:VEXTRACTI32x4 XmmReg2_m128^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x39; ZmmReg1 ... & XmmReg2_m128; imm8 +:VEXTRACTI32x4 XmmReg2 XmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x39; ZmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextracti32x4_avx512f( ZmmReg1, imm8:1 ); + XmmResult = vextracti32x4_avx512f( ZmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); +} + +:VEXTRACTI32x4 m128 XmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask32; byte=0x39; ZmmReg1 ... & m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextracti32x4_avx512f( ZmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VEXTRACTI128/VEXTRACTI32x4/VEXTRACTI64x2/VEXTRACTI32x8/VEXTRACTI64x4 5-106 PAGE 1930 LINE 99441 define pcodeop vextracti64x2_avx512vl ; -:VEXTRACTI64X2 XmmReg2_m128^KWriteMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x39; YmmReg1 ... 
& XmmReg2_m128; imm8 +:VEXTRACTI64X2 XmmReg2 XmmOpMask64, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x39; YmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextracti64x2_avx512vl( YmmReg1, imm8:1 ); + XmmResult = vextracti64x2_avx512vl( YmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); +} + +:VEXTRACTI64X2 m128 XmmOpMask64, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x39; YmmReg1 ... & m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextracti64x2_avx512vl( YmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; } # VEXTRACTI128/VEXTRACTI32x4/VEXTRACTI64x2/VEXTRACTI32x8/VEXTRACTI64x4 5-106 PAGE 1930 LINE 99444 define pcodeop vextracti64x2_avx512dq ; -:VEXTRACTI64X2 XmmReg2_m128^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x39; ZmmReg1 ... & XmmReg2_m128; imm8 +:VEXTRACTI64X2 XmmReg2 XmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x39; ZmmReg1 & mod=3 & XmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - XmmReg2_m128 = vextracti64x2_avx512dq( ZmmReg1, imm8:1 ); + XmmResult = vextracti64x2_avx512dq( ZmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); +} + +:VEXTRACTI64X2 m128 XmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & XmmOpMask64; byte=0x39; ZmmReg1 ... 
& m128; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + XmmResult = vextracti64x2_avx512dq( ZmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; } # VEXTRACTI128/VEXTRACTI32x4/VEXTRACTI64x2/VEXTRACTI32x8/VEXTRACTI64x4 5-106 PAGE 1930 LINE 99447 define pcodeop vextracti32x8_avx512dq ; -:VEXTRACTI32X8 YmmReg2_m256^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x3B; ZmmReg1 ... & YmmReg2_m256; imm8 +:VEXTRACTI32X8 YmmReg2 YmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & YmmOpMask32; byte=0x3B; ZmmReg1 & mod=3 & YmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - YmmReg2_m256 = vextracti32x8_avx512dq( ZmmReg1, imm8:1 ); + YmmResult = vextracti32x8_avx512dq( ZmmReg1, imm8:1 ); + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); +} + +:VEXTRACTI32X8 m256 YmmOpMask32, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & YmmOpMask32; byte=0x3B; ZmmReg1 ... & m256; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + YmmResult = vextracti32x8_avx512dq( ZmmReg1, imm8:1 ); + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # VEXTRACTI128/VEXTRACTI32x4/VEXTRACTI64x2/VEXTRACTI32x8/VEXTRACTI64x4 5-106 PAGE 1930 LINE 99450 define pcodeop vextracti64x4_avx512f ; -:VEXTRACTI64x4 YmmReg2_m256^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x3B; ZmmReg1 ... 
& YmmReg2_m256; imm8 +:VEXTRACTI64x4 YmmReg2 YmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & YmmOpMask64; byte=0x3B; ZmmReg1 & mod=3 & YmmReg2 & ZmmReg2; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - YmmReg2_m256 = vextracti64x4_avx512f( ZmmReg1, imm8:1 ); + YmmResult = vextracti64x4_avx512f( ZmmReg1, imm8:1 ); + YmmMask = YmmReg2; + build YmmOpMask64; + ZmmReg2 = zext(YmmResult); } +:VEXTRACTI64x4 m256 YmmOpMask64, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & YmmOpMask64; byte=0x3B; ZmmReg1 ... & m256; imm8 +[ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) +{ + YmmResult = vextracti64x4_avx512f( ZmmReg1, imm8:1 ); + YmmMask = m256; + build YmmOpMask64; + m256 = YmmResult; +} # VFIXUPIMMPD 5-112 PAGE 1936 LINE 99754 define pcodeop vfixupimmpd_avx512vl ; -:VFIXUPIMMPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VFIXUPIMMPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfixupimmpd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vfixupimmpd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFIXUPIMMPD 5-112 PAGE 1936 LINE 99757 -:VFIXUPIMMPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst; imm8 +:VFIXUPIMMPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfixupimmpd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vfixupimmpd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFIXUPIMMPD 5-112 PAGE 1936 LINE 99760 define pcodeop vfixupimmpd_avx512f ; -:VFIXUPIMMPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x54; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VFIXUPIMMPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x54; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfixupimmpd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vfixupimmpd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFIXUPIMMPS 5-116 PAGE 1940 LINE 99957 define pcodeop vfixupimmps_avx512vl ; -:VFIXUPIMMPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VFIXUPIMMPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x54; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfixupimmps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vfixupimmps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFIXUPIMMPS 5-116 PAGE 1940 LINE 99960 -:VFIXUPIMMPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VFIXUPIMMPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x54; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfixupimmps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vfixupimmps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFIXUPIMMPS 5-116 PAGE 1940 LINE 99963 define pcodeop vfixupimmps_avx512f ; -:VFIXUPIMMPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x54; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VFIXUPIMMPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x54; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfixupimmps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vfixupimmps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFIXUPIMMSD 5-120 PAGE 1944 LINE 100159 define pcodeop vfixupimmsd_avx512f ; -:VFIXUPIMMSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64; imm8 +:VFIXUPIMMSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfixupimmsd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vfixupimmsd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFIXUPIMMSS 5-123 PAGE 1947 LINE 100331 define pcodeop vfixupimmss_avx512f ; -:VFIXUPIMMSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32; imm8 +:VFIXUPIMMSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x55; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfixupimmss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vfixupimmss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100523 define pcodeop vfmadd132pd_avx512vl ; -:VFMADD132PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x98; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMADD132PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x98; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst { - local tmp:16 = vfmadd132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100526 define pcodeop vfmadd213pd_avx512vl ; -:VFMADD213PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xA8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMADD213PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xA8; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmadd213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100529 define pcodeop vfmadd231pd_avx512vl ; -:VFMADD231PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xB8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMADD231PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xB8; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmadd231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100532 -:VFMADD132PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x98; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMADD132PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x98; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmadd132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmadd132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100535 -:VFMADD213PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xA8; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMADD213PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xA8; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmadd213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmadd213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100538 -:VFMADD231PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xB8; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMADD231PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xB8; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmadd231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmadd231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100541 define pcodeop vfmadd132pd_avx512f ; -:VFMADD132PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x98; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMADD132PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x98; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmadd132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmadd132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100544 define pcodeop vfmadd213pd_avx512f ; -:VFMADD213PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xA8; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMADD213PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xA8; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmadd213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmadd213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMADD132PD/VFMADD213PD/VFMADD231PD 5-126 PAGE 1950 LINE 100547 define pcodeop vfmadd231pd_avx512f ; -:VFMADD231PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xB8; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMADD231PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xB8; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmadd231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmadd231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100884 define pcodeop vfmadd132ps_avx512vl ; -:VFMADD132PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x98; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMADD132PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x98; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmadd132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100887 define pcodeop vfmadd213ps_avx512vl ; -:VFMADD213PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xA8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMADD213PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xA8; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmadd213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100890 define pcodeop vfmadd231ps_avx512vl ; -:VFMADD231PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xB8; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMADD231PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xB8; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmadd231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100893 -:VFMADD132PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x98; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMADD132PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x98; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmadd132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmadd132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100896 -:VFMADD213PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xA8; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMADD213PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xA8; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmadd213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmadd213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100899 -:VFMADD231PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xB8; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMADD231PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xB8; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmadd231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmadd231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100902 define pcodeop vfmadd132ps_avx512f ; -:VFMADD132PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x98; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMADD132PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x98; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmadd132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmadd132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100905 define pcodeop vfmadd213ps_avx512f ; -:VFMADD213PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xA8; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMADD213PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xA8; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmadd213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmadd213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMADD132PS/VFMADD213PS/VFMADD231PS 5-133 PAGE 1957 LINE 100908 define pcodeop vfmadd231ps_avx512f ; -:VFMADD231PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xB8; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMADD231PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xB8; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmadd231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmadd231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMADD132SD/VFMADD213SD/VFMADD231SD 5-140 PAGE 1964 LINE 101235 define pcodeop vfmadd132sd_avx512f ; -:VFMADD132SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x99; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFMADD132SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x99; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmadd132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADD132SD/VFMADD213SD/VFMADD231SD 5-140 PAGE 1964 LINE 101238 define pcodeop vfmadd213sd_avx512f ; -:VFMADD213SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xA9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFMADD213SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xA9; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmadd213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADD132SD/VFMADD213SD/VFMADD231SD 5-140 PAGE 1964 LINE 101241 define pcodeop vfmadd231sd_avx512f ; -:VFMADD231SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xB9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFMADD231SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xB9; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmadd231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADD132SS/VFMADD213SS/VFMADD231SS 5-143 PAGE 1967 LINE 101403 define pcodeop vfmadd132ss_avx512f ; -:VFMADD132SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x99; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFMADD132SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x99; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmadd132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADD132SS/VFMADD213SS/VFMADD231SS 5-143 PAGE 1967 LINE 101406 define pcodeop vfmadd213ss_avx512f ; -:VFMADD213SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xA9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFMADD213SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xA9; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmadd213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADD132SS/VFMADD213SS/VFMADD231SS 5-143 PAGE 1967 LINE 101409 define pcodeop vfmadd231ss_avx512f ; -:VFMADD231SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xB9; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFMADD231SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xB9; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmadd231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmadd231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-146 PAGE 1970 LINE 101585 define pcodeop vfmaddsub213pd_avx512vl ; -:VFMADDSUB213PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xA6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMADDSUB213PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xA6; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmaddsub213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmaddsub213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-146 PAGE 1970 LINE 101589 define pcodeop vfmaddsub231pd_avx512vl ; -:VFMADDSUB231PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xB6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMADDSUB231PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xB6; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmaddsub231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmaddsub231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-146 PAGE 1970 LINE 101593 define pcodeop vfmaddsub132pd_avx512vl ; -:VFMADDSUB132PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x96; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMADDSUB132PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x96; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmaddsub132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmaddsub132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-146 PAGE 1970 LINE 101597 -:VFMADDSUB213PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xA6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMADDSUB213PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xA6; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmaddsub213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmaddsub213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-146 PAGE 1970 LINE 101601 -:VFMADDSUB231PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xB6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMADDSUB231PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xB6; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmaddsub231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmaddsub231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-146 PAGE 1970 LINE 101605 -:VFMADDSUB132PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x96; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMADDSUB132PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x96; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmaddsub132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmaddsub132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-147 PAGE 1971 LINE 101621 define pcodeop vfmaddsub213pd_avx512f ; -:VFMADDSUB213PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xA6; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMADDSUB213PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xA6; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmaddsub213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmaddsub213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-147 PAGE 1971 LINE 101625 define pcodeop vfmaddsub231pd_avx512f ; -:VFMADDSUB231PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xB6; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMADDSUB231PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xB6; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmaddsub231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmaddsub231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMADDSUB132PD/VFMADDSUB213PD/VFMADDSUB231PD 5-147 PAGE 1971 LINE 101629 define pcodeop vfmaddsub132pd_avx512f ; -:VFMADDSUB132PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x96; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMADDSUB132PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x96; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmaddsub132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmaddsub132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102024 define pcodeop vfmaddsub213ps_avx512vl ; -:VFMADDSUB213PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xA6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMADDSUB213PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xA6; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmaddsub213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmaddsub213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102028 define pcodeop vfmaddsub231ps_avx512vl ; -:VFMADDSUB231PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xB6; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMADDSUB231PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xB6; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmaddsub231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmaddsub231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102031 define pcodeop vfmaddsub132ps_avx512vl ; -:VFMADDSUB132PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x96; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMADDSUB132PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x96; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmaddsub132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmaddsub132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102034 -:VFMADDSUB213PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xA6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMADDSUB213PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xA6; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmaddsub213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmaddsub213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102038 -:VFMADDSUB231PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xB6; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMADDSUB231PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xB6; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmaddsub231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmaddsub231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102041 -:VFMADDSUB132PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x96; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMADDSUB132PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x96; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmaddsub132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmaddsub132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102044 define pcodeop vfmaddsub213ps_avx512f ; -:VFMADDSUB213PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xA6; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMADDSUB213PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xA6; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmaddsub213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmaddsub213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102048 define pcodeop vfmaddsub231ps_avx512f ; -:VFMADDSUB231PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xB6; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMADDSUB231PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xB6; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmaddsub231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmaddsub231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMADDSUB132PS/VFMADDSUB213PS/VFMADDSUB231PS 5-156 PAGE 1980 LINE 102051 define pcodeop vfmaddsub132ps_avx512f ; -:VFMADDSUB132PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x96; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMADDSUB132PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x96; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmaddsub132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmaddsub132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-165 PAGE 1989 LINE 102454 define pcodeop vfmsubadd132pd_avx512vl ; -:VFMSUBADD132PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x97; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMSUBADD132PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x97; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsubadd132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsubadd132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-165 PAGE 1989 LINE 102458 define pcodeop vfmsubadd213pd_avx512vl ; -:VFMSUBADD213PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xA7; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMSUBADD213PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xA7; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsubadd213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsubadd213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-165 PAGE 1989 LINE 102462 define pcodeop vfmsubadd231pd_avx512vl ; -:VFMSUBADD231PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xB7; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMSUBADD231PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xB7; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsubadd231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsubadd231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-165 PAGE 1989 LINE 102466 -:VFMSUBADD132PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x97; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMSUBADD132PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x97; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsubadd132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsubadd132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-165 PAGE 1989 LINE 102470 -:VFMSUBADD213PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xA7; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMSUBADD213PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xA7; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsubadd213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsubadd213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-165 PAGE 1989 LINE 102474 -:VFMSUBADD231PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xB7; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMSUBADD231PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xB7; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsubadd231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsubadd231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-166 PAGE 1990 LINE 102490 define pcodeop vfmsubadd132pd_avx512f ; -:VFMSUBADD132PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x97; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMSUBADD132PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x97; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsubadd132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmsubadd132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-166 PAGE 1990 LINE 102494 define pcodeop vfmsubadd213pd_avx512f ; -:VFMSUBADD213PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xA7; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMSUBADD213PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xA7; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsubadd213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmsubadd213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMSUBADD132PD/VFMSUBADD213PD/VFMSUBADD231PD 5-166 PAGE 1990 LINE 102498 define pcodeop vfmsubadd231pd_avx512f ; -:VFMSUBADD231PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xB7; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMSUBADD231PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xB7; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsubadd231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmsubadd231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102894 define pcodeop vfmsubadd132ps_avx512vl ; -:VFMSUBADD132PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x97; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMSUBADD132PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x97; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsubadd132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsubadd132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102897 define pcodeop vfmsubadd213ps_avx512vl ; -:VFMSUBADD213PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xA7; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMSUBADD213PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xA7; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsubadd213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsubadd213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102901 define pcodeop vfmsubadd231ps_avx512vl ; -:VFMSUBADD231PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xB7; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMSUBADD231PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xB7; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsubadd231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsubadd231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102904 -:VFMSUBADD132PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x97; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMSUBADD132PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x97; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsubadd132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsubadd132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102907 -:VFMSUBADD213PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xA7; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMSUBADD213PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xA7; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsubadd213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsubadd213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102911 -:VFMSUBADD231PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xB7; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMSUBADD231PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xB7; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsubadd231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsubadd231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102914 define pcodeop vfmsubadd132ps_avx512f ; -:VFMSUBADD132PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x97; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMSUBADD132PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x97; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsubadd132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmsubadd132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102917 define pcodeop vfmsubadd213ps_avx512f ; -:VFMSUBADD213PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xA7; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMSUBADD213PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xA7; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsubadd213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmsubadd213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUBADD132PS/VFMSUBADD213PS/VFMSUBADD231PS 5-175 PAGE 1999 LINE 102921 define pcodeop vfmsubadd231ps_avx512f ; -:VFMSUBADD231PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xB7; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMSUBADD231PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xB7; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsubadd231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmsubadd231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103332 define pcodeop vfmsub132pd_avx512vl ; -:VFMSUB132PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x9A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMSUB132PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x9A; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsub132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103335 define pcodeop vfmsub213pd_avx512vl ; -:VFMSUB213PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xAA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMSUB213PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xAA; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsub213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103338 define pcodeop vfmsub231pd_avx512vl ; -:VFMSUB231PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xBA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFMSUB231PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xBA; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsub231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103341 -:VFMSUB132PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x9A; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMSUB132PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x9A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsub132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsub132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103344 -:VFMSUB213PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xAA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMSUB213PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xAA; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsub213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsub213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103347 -:VFMSUB231PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xBA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFMSUB231PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xBA; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsub231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsub231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103350 define pcodeop vfmsub132pd_avx512f ; -:VFMSUB132PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x9A; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMSUB132PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x9A; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsub132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmsub132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103353 define pcodeop vfmsub213pd_avx512f ; -:VFMSUB213PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xAA; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMSUB213PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xAA; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsub213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmsub213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMSUB132PD/VFMSUB213PD/VFMSUB231PD 5-185 PAGE 2009 LINE 103356 define pcodeop vfmsub231pd_avx512f ; -:VFMSUB231PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xBA; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFMSUB231PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xBA; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsub231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfmsub231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103692 define pcodeop vfmsub132ps_avx512vl ; -:VFMSUB132PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x9A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMSUB132PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x9A; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsub132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103695 define pcodeop vfmsub213ps_avx512vl ; -:VFMSUB213PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xAA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMSUB213PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xAA; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsub213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103698 define pcodeop vfmsub231ps_avx512vl ; -:VFMSUB231PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xBA; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFMSUB231PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xBA; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfmsub231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103701 -:VFMSUB132PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x9A; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMSUB132PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x9A; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsub132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsub132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103704 -:VFMSUB213PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xAA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMSUB213PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xAA; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsub213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsub213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103707 -:VFMSUB231PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xBA; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFMSUB231PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xBA; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfmsub231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfmsub231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103710 define pcodeop vfmsub132ps_avx512f ; -:VFMSUB132PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x9A; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMSUB132PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x9A; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsub132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmsub132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103713 define pcodeop vfmsub213ps_avx512f ; -:VFMSUB213PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xAA; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMSUB213PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xAA; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsub213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmsub213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUB132PS/VFMSUB213PS/VFMSUB231PS 5-192 PAGE 2016 LINE 103716 define pcodeop vfmsub231ps_avx512f ; -:VFMSUB231PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xBA; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFMSUB231PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xBA; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfmsub231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfmsub231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFMSUB132SD/VFMSUB213SD/VFMSUB231SD 5-199 PAGE 2023 LINE 104042 define pcodeop vfmsub132sd_avx512f ; -:VFMSUB132SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x9B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFMSUB132SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x9B; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmsub132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUB132SD/VFMSUB213SD/VFMSUB231SD 5-199 PAGE 2023 LINE 104045 define pcodeop vfmsub213sd_avx512f ; -:VFMSUB213SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xAB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFMSUB213SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xAB; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmsub213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUB132SD/VFMSUB213SD/VFMSUB231SD 5-199 PAGE 2023 LINE 104048 define pcodeop vfmsub231sd_avx512f ; -:VFMSUB231SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xBB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFMSUB231SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xBB; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmsub231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFMSUB132SS/VFMSUB213SS/VFMSUB231SS 5-202 PAGE 2026 LINE 104217 define pcodeop vfmsub132ss_avx512f ; -:VFMSUB132SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x9B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFMSUB132SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x9B; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmsub132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUB132SS/VFMSUB213SS/VFMSUB231SS 5-202 PAGE 2026 LINE 104220 define pcodeop vfmsub213ss_avx512f ; -:VFMSUB213SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xAB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFMSUB213SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xAB; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmsub213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFMSUB132SS/VFMSUB213SS/VFMSUB231SS 5-202 PAGE 2026 LINE 104223 define pcodeop vfmsub231ss_avx512f ; -:VFMSUB231SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xBB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFMSUB231SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xBB; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfmsub231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfmsub231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104401 define pcodeop vfnmadd132pd_avx512vl ; -:VFNMADD132PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x9C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFNMADD132PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x9C; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmadd132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104405 define pcodeop vfnmadd213pd_avx512vl ; -:VFNMADD213PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xAC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFNMADD213PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xAC; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmadd213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104408 define pcodeop vfnmadd231pd_avx512vl ; -:VFNMADD231PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xBC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFNMADD231PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xBC; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmadd231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104412 -:VFNMADD132PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x9C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFNMADD132PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x9C; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmadd132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmadd132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104416 -:VFNMADD213PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xAC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFNMADD213PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xAC; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmadd213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmadd213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104419 -:VFNMADD231PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xBC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFNMADD231PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xBC; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmadd231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmadd231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104423 define pcodeop vfnmadd132pd_avx512f ; -:VFNMADD132PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x9C; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFNMADD132PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x9C; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmadd132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfnmadd132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104426 define pcodeop vfnmadd213pd_avx512f ; -:VFNMADD213PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xAC; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFNMADD213PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xAC; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmadd213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfnmadd213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFNMADD132PD/VFNMADD213PD/VFNMADD231PD 5-205 PAGE 2029 LINE 104429 define pcodeop vfnmadd231pd_avx512f ; -:VFNMADD231PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xBC; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFNMADD231PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xBC; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmadd231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfnmadd231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104760 define pcodeop vfnmadd132ps_avx512vl ; -:VFNMADD132PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x9C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFNMADD132PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x9C; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmadd132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104763 define pcodeop vfnmadd213ps_avx512vl ; -:VFNMADD213PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xAC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFNMADD213PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xAC; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmadd213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104766 define pcodeop vfnmadd231ps_avx512vl ; -:VFNMADD231PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xBC; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFNMADD231PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xBC; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmadd231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104769 -:VFNMADD132PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x9C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFNMADD132PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x9C; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmadd132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmadd132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104772 -:VFNMADD213PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xAC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFNMADD213PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xAC; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmadd213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmadd213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104775 -:VFNMADD231PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xBC; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFNMADD231PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xBC; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmadd231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmadd231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104778 -:VFNMADD132PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x9C; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFNMADD132PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x9C; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmadd132ps_avx512vl( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfnmadd132ps_avx512vl( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104781 define pcodeop vfnmadd213ps_avx512f ; -:VFNMADD213PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xAC; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFNMADD213PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xAC; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmadd213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfnmadd213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFNMADD132PS/VFNMADD213PS/VFNMADD231PS 5-212 PAGE 2036 LINE 104784 define pcodeop vfnmadd231ps_avx512f ; -:VFNMADD231PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xBC; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFNMADD231PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xBC; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmadd231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfnmadd231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFNMADD132SD/VFNMADD213SD/VFNMADD231SD 5-218 PAGE 2042 LINE 105098 define pcodeop vfnmadd132sd_avx512f ; -:VFNMADD132SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x9D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFNMADD132SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x9D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmadd132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMADD132SD/VFNMADD213SD/VFNMADD231SD 5-218 PAGE 2042 LINE 105101 define pcodeop vfnmadd213sd_avx512f ; -:VFNMADD213SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xAD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFNMADD213SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xAD; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmadd213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMADD132SD/VFNMADD213SD/VFNMADD231SD 5-218 PAGE 2042 LINE 105104 define pcodeop vfnmadd231sd_avx512f ; -:VFNMADD231SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xBD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFNMADD231SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xBD; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmadd231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMADD132SS/VFNMADD213SS/VFNMADD231SS 5-221 PAGE 2045 LINE 105270 define pcodeop vfnmadd132ss_avx512f ; -:VFNMADD132SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x9D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFNMADD132SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x9D; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmadd132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMADD132SS/VFNMADD213SS/VFNMADD231SS 5-221 PAGE 2045 LINE 105273 define pcodeop vfnmadd213ss_avx512f ; -:VFNMADD213SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xAD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFNMADD213SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xAD; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmadd213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMADD132SS/VFNMADD213SS/VFNMADD231SS 5-221 PAGE 2045 LINE 105276 define pcodeop vfnmadd231ss_avx512f ; -:VFNMADD231SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xBD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFNMADD231SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xBD; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmadd231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmadd231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105451 define pcodeop vfnmsub132pd_avx512vl ; -:VFNMSUB132PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x9E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFNMSUB132PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x9E; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmsub132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub132pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105455 define pcodeop vfnmsub213pd_avx512vl ; -:VFNMSUB213PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xAE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFNMSUB213PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xAE; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmsub213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub213pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105458 define pcodeop vfnmsub231pd_avx512vl ; -:VFNMSUB231PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xBE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VFNMSUB231PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xBE; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmsub231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub231pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105462 -:VFNMSUB132PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x9E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFNMSUB132PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x9E; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmsub132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmsub132pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105466 -:VFNMSUB213PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xAE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFNMSUB213PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xAE; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmsub213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmsub213pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105469 -:VFNMSUB231PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0xBE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VFNMSUB231PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0xBE; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmsub231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmsub231pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105473 define pcodeop vfnmsub132pd_avx512f ; -:VFNMSUB132PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x9E; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFNMSUB132PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x9E; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmsub132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfnmsub132pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105476 define pcodeop vfnmsub213pd_avx512f ; -:VFNMSUB213PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xAE; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFNMSUB213PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xAE; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmsub213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfnmsub213pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFNMSUB132PD/VFNMSUB213PD/VFNMSUB231PD 5-224 PAGE 2048 LINE 105479 define pcodeop vfnmsub231pd_avx512f ; -:VFNMSUB231PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0xBE; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VFNMSUB231PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0xBE; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmsub231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vfnmsub231pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105800 define pcodeop vfnmsub132ps_avx512vl ; -:VFNMSUB132PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x9E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFNMSUB132PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x9E; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmsub132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub132ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105803 define pcodeop vfnmsub213ps_avx512vl ; -:VFNMSUB213PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xAE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFNMSUB213PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xAE; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmsub213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub213ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105806 define pcodeop vfnmsub231ps_avx512vl ; -:VFNMSUB231PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xBE; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VFNMSUB231PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xBE; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vfnmsub231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub231ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105809 -:VFNMSUB132PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x9E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFNMSUB132PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x9E; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmsub132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmsub132ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105812 -:VFNMSUB213PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xAE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFNMSUB213PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xAE; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmsub213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmsub213ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105815 -:VFNMSUB231PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0xBE; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VFNMSUB231PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0xBE; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vfnmsub231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vfnmsub231ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105818 define pcodeop vfnmsub132ps_avx512f ; -:VFNMSUB132PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x9E; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFNMSUB132PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x9E; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmsub132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfnmsub132ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105821 define pcodeop vfnmsub213ps_avx512f ; -:VFNMSUB213PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xAE; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFNMSUB213PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xAE; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmsub213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfnmsub213ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFNMSUB132PS/VFNMSUB213PS/VFNMSUB231PS 5-230 PAGE 2054 LINE 105824 define pcodeop vfnmsub231ps_avx512f ; -:VFNMSUB231PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0xBE; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VFNMSUB231PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0xBE; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vfnmsub231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vfnmsub231ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VFNMSUB132SD/VFNMSUB213SD/VFNMSUB231SD 5-236 PAGE 2060 LINE 106135 define pcodeop vfnmsub132sd_avx512f ; -:VFNMSUB132SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x9F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFNMSUB132SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x9F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmsub132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub132sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132SD/VFNMSUB213SD/VFNMSUB231SD 5-236 PAGE 2060 LINE 106138 define pcodeop vfnmsub213sd_avx512f ; -:VFNMSUB213SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xAF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFNMSUB213SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xAF; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmsub213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub213sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132SD/VFNMSUB213SD/VFNMSUB231SD 5-236 PAGE 2060 LINE 106141 define pcodeop vfnmsub231sd_avx512f ; -:VFNMSUB231SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xBF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VFNMSUB231SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xBF; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmsub231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub231sd_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132SS/VFNMSUB213SS/VFNMSUB231SS 5-239 PAGE 2063 LINE 106307 define pcodeop vfnmsub132ss_avx512f ; -:VFNMSUB132SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x9F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFNMSUB132SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x9F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmsub132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub132ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132SS/VFNMSUB213SS/VFNMSUB231SS 5-239 PAGE 2063 LINE 106310 define pcodeop vfnmsub213ss_avx512f ; -:VFNMSUB213SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xAF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFNMSUB213SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xAF; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmsub213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub213ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFNMSUB132SS/VFNMSUB213SS/VFNMSUB231SS 5-239 PAGE 2063 LINE 106313 define pcodeop vfnmsub231ss_avx512f ; -:VFNMSUB231SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xBF; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VFNMSUB231SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xBF; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vfnmsub231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vfnmsub231ss_avx512f( XmmReg1, vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VFPCLASSPD 5-242 PAGE 2066 LINE 106466 +# There is an error in the manual where the immediate byte is not specified in the operand encoding, but it is present define pcodeop vfpclasspd_avx512vl ; -:VFPCLASSPD KReg_reg^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x66; KReg_reg ... & XmmReg2_m128_m64bcst +:VFPCLASSPD KReg_reg AVXOpMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask; byte=0x66; KReg_reg ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vfpclasspd_avx512vl( XmmReg2_m128_m64bcst ); + KReg_reg = vfpclasspd_avx512vl( XmmReg2_m128_m64bcst, AVXOpMask, imm8:1 ); } # VFPCLASSPD 5-242 PAGE 2066 LINE 106470 -:VFPCLASSPD KReg_reg^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x66; KReg_reg ... & YmmReg2_m256_m64bcst +:VFPCLASSPD KReg_reg AVXOpMask, YmmReg2_m256_m64bcst,imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask; byte=0x66; KReg_reg ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vfpclasspd_avx512vl( YmmReg2_m256_m64bcst ); + KReg_reg = vfpclasspd_avx512vl( YmmReg2_m256_m64bcst, AVXOpMask, imm8:1 ); } # VFPCLASSPD 5-242 PAGE 2066 LINE 106474 define pcodeop vfpclasspd_avx512dq ; -:VFPCLASSPD KReg_reg^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x66; KReg_reg ... 
& ZmmReg2_m512_m64bcst +:VFPCLASSPD KReg_reg AVXOpMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask; byte=0x66; KReg_reg ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vfpclasspd_avx512dq( ZmmReg2_m512_m64bcst ); + KReg_reg = vfpclasspd_avx512dq( ZmmReg2_m512_m64bcst, AVXOpMask, imm8:1 ); } # VFPCLASSPS 5-245 PAGE 2069 LINE 106608 +# There is an error in the manual where the immediate byte is not specified in the operand encoding, but it is present define pcodeop vfpclassps_avx512vl ; -:VFPCLASSPS KReg_reg^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x66; KReg_reg ... & XmmReg2_m128_m32bcst +:VFPCLASSPS KReg_reg AVXOpMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask; byte=0x66; KReg_reg ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vfpclassps_avx512vl( XmmReg2_m128_m32bcst ); + KReg_reg = vfpclassps_avx512vl( XmmReg2_m128_m32bcst, AVXOpMask, imm8:1 ); } # VFPCLASSPS 5-245 PAGE 2069 LINE 106612 -:VFPCLASSPS KReg_reg^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x66; KReg_reg ... & YmmReg2_m256_m32bcst +:VFPCLASSPS KReg_reg AVXOpMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask; byte=0x66; KReg_reg ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vfpclassps_avx512vl( YmmReg2_m256_m32bcst ); + KReg_reg = vfpclassps_avx512vl( YmmReg2_m256_m32bcst, AVXOpMask, imm8:1 ); } # VFPCLASSPS 5-245 PAGE 2069 LINE 106616 define pcodeop vfpclassps_avx512dq ; -:VFPCLASSPS KReg_reg^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x66; KReg_reg ... & ZmmReg2_m512_m32bcst +:VFPCLASSPS KReg_reg AVXOpMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask; byte=0x66; KReg_reg ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vfpclassps_avx512dq( ZmmReg2_m512_m32bcst ); + KReg_reg = vfpclassps_avx512dq( ZmmReg2_m512_m32bcst, AVXOpMask, imm8:1 ); } # VFPCLASSSD 5-247 PAGE 2071 LINE 106722 define pcodeop vfpclasssd_avx512dq ; -:VFPCLASSSD KReg_reg^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x67; KReg_reg ... & XmmReg2_m64 +:VFPCLASSSD KReg_reg AVXOpMask, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask; byte=0x67; KReg_reg ... & XmmReg2_m64; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - KReg_reg = vfpclasssd_avx512dq( XmmReg2_m64 ); + KReg_reg = vfpclasssd_avx512dq( XmmReg2_m64, AVXOpMask, imm8:1 ); } # VFPCLASSSS 5-249 PAGE 2073 LINE 106810 define pcodeop vfpclassss_avx512dq ; -:VFPCLASSSS KReg_reg^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x67; KReg_reg ... & XmmReg2_m32 +:VFPCLASSSS KReg_reg AVXOpMask, XmmReg2_m32, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask; byte=0x67; KReg_reg ... 
& XmmReg2_m32; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - KReg_reg = vfpclassss_avx512dq( XmmReg2_m32 ); + KReg_reg = vfpclassss_avx512dq( XmmReg2_m32, AVXOpMask, imm8:1 ); } # VGATHERDPS/VGATHERDPD 5-261 PAGE 2085 LINE 107357 # WARNING: did not recognize qualifier /vsib for "VGATHERDPS xmm1 {k1}, vm32x" define pcodeop vgatherdps_avx512vl ; -:VGATHERDPS XmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x92; (XmmReg1 & ZmmReg1) ... & m32 +:VGATHERDPS XmmReg1 XmmOpMask32, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x92; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgatherdps_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vgatherdps_avx512vl( m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VGATHERDPS/VGATHERDPD 5-261 PAGE 2085 LINE 107359 # WARNING: did not recognize qualifier /vsib for "VGATHERDPS ymm1 {k1}, vm32y" -:VGATHERDPS YmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x92; (YmmReg1 & ZmmReg1) ... & m32 +:VGATHERDPS YmmReg1 YmmOpMask32, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x92; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vgatherdps_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vgatherdps_avx512vl( m32 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VGATHERDPS/VGATHERDPD 5-261 PAGE 2085 LINE 107361 # WARNING: did not recognize qualifier /vsib for "VGATHERDPS zmm1 {k1}, vm32z" define pcodeop vgatherdps_avx512f ; -:VGATHERDPS ZmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x92; ZmmReg1 ... 
& m32 +:VGATHERDPS ZmmReg1 ZmmOpMask32, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x92; (ZmmReg1 & ZmmOpMask32) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vgatherdps_avx512f( m32 ); + ZmmResult = vgatherdps_avx512f( m32 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VGATHERDPS/VGATHERDPD 5-261 PAGE 2085 LINE 107363 # WARNING: did not recognize qualifier /vsib for "VGATHERDPD xmm1 {k1}, vm32x" define pcodeop vgatherdpd_avx512vl ; -:VGATHERDPD XmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x92; (XmmReg1 & ZmmReg1) ... & m32 +:VGATHERDPD XmmReg1 XmmOpMask64, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x92; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgatherdpd_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vgatherdpd_avx512vl( m32 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGATHERDPS/VGATHERDPD 5-261 PAGE 2085 LINE 107366 # WARNING: did not recognize qualifier /vsib for "VGATHERDPD ymm1 {k1}, vm32x" -:VGATHERDPD YmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x92; (YmmReg1 & ZmmReg1) ... & m32 +:VGATHERDPD YmmReg1 YmmOpMask64, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x92; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vgatherdpd_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vgatherdpd_avx512vl( m32 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VGATHERDPS/VGATHERDPD 5-261 PAGE 2085 LINE 107369 # WARNING: did not recognize qualifier /vsib for "VGATHERDPD zmm1 {k1}, vm32y" define pcodeop vgatherdpd_avx512f ; -:VGATHERDPD ZmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x92; ZmmReg1 ... & m32 +:VGATHERDPD ZmmReg1 ZmmOpMask64, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x92; (ZmmReg1 & ZmmOpMask64) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vgatherdpd_avx512f( m32 ); + ZmmResult = vgatherdpd_avx512f( m32 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VGATHERPF0DPS/VGATHERPF0QPS/VGATHERPF0DPD/VGATHERPF0QPD 5-264 PAGE 2088 LINE 107497 # WARNING: did not recognize qualifier /vsib for "VGATHERPF0DPS vm32z {k1}" define pcodeop vgatherpf0dps_avx512pf ; -:VGATHERPF0DPS m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC6; reg_opcode=1 ... & m32 +:VGATHERPF0DPS m32 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xC6; reg_opcode=1 ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf0dps_avx512pf( m32 ); + vgatherpf0dps_avx512pf( m32, XmmOpMask ); # TODO missing destination or side effects } # VGATHERPF0DPS/VGATHERPF0QPS/VGATHERPF0DPD/VGATHERPF0QPD 5-264 PAGE 2088 LINE 107500 # WARNING: did not recognize qualifier /vsib for "VGATHERPF0QPS vm64z {k1}" define pcodeop vgatherpf0qps_avx512pf ; -:VGATHERPF0QPS m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC7; reg_opcode=1 ... 
& m64 +:VGATHERPF0QPS m64 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xC7; reg_opcode=1 ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf0qps_avx512pf( m64 ); + vgatherpf0qps_avx512pf( m64, XmmOpMask ); # TODO missing destination or side effects } # VGATHERPF0DPS/VGATHERPF0QPS/VGATHERPF0DPD/VGATHERPF0QPD 5-264 PAGE 2088 LINE 107503 # WARNING: did not recognize qualifier /vsib for "VGATHERPF0DPD vm32y {k1}" define pcodeop vgatherpf0dpd_avx512pf ; -:VGATHERPF0DPD m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC6; reg_opcode=1 ... & m32 +:VGATHERPF0DPD m32 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xC6; reg_opcode=1 ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf0dpd_avx512pf( m32 ); + vgatherpf0dpd_avx512pf( m32, XmmOpMask ); # TODO missing destination or side effects } # VGATHERPF0DPS/VGATHERPF0QPS/VGATHERPF0DPD/VGATHERPF0QPD 5-264 PAGE 2088 LINE 107506 # WARNING: did not recognize qualifier /vsib for "VGATHERPF0QPD vm64z {k1}" define pcodeop vgatherpf0qpd_avx512pf ; -:VGATHERPF0QPD m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC7; reg_opcode=1 ... & m64 +:VGATHERPF0QPD m64 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xC7; reg_opcode=1 ... 
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf0qpd_avx512pf( m64 ); + vgatherpf0qpd_avx512pf( m64, XmmOpMask ); # TODO missing destination or side effects } # VGATHERPF1DPS/VGATHERPF1QPS/VGATHERPF1DPD/VGATHERPF1QPD 5-267 PAGE 2091 LINE 107620 # WARNING: did not recognize qualifier /vsib for "VGATHERPF1DPS vm32z {k1}" define pcodeop vgatherpf1dps_avx512pf ; -:VGATHERPF1DPS m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC6; reg_opcode=2 ... & m32 +:VGATHERPF1DPS m32 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xC6; reg_opcode=2 ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf1dps_avx512pf( m32 ); + vgatherpf1dps_avx512pf( m32 , XmmOpMask); # TODO missing destination or side effects } # VGATHERPF1DPS/VGATHERPF1QPS/VGATHERPF1DPD/VGATHERPF1QPD 5-267 PAGE 2091 LINE 107623 # WARNING: did not recognize qualifier /vsib for "VGATHERPF1QPS vm64z {k1}" define pcodeop vgatherpf1qps_avx512pf ; -:VGATHERPF1QPS m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC7; reg_opcode=2 ... & m64 +:VGATHERPF1QPS m64 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xC7; reg_opcode=2 ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf1qps_avx512pf( m64 ); + vgatherpf1qps_avx512pf( m64, XmmOpMask ); # TODO missing destination or side effects } # VGATHERPF1DPS/VGATHERPF1QPS/VGATHERPF1DPD/VGATHERPF1QPD 5-267 PAGE 2091 LINE 107626 # WARNING: did not recognize qualifier /vsib for "VGATHERPF1DPD vm32y {k1}" define pcodeop vgatherpf1dpd_avx512pf ; -:VGATHERPF1DPD m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC6; reg_opcode=2 ... 
& m32 +:VGATHERPF1DPD m32 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xC6; reg_opcode=2 ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf1dpd_avx512pf( m32 ); + vgatherpf1dpd_avx512pf( m32, XmmOpMask ); # TODO missing destination or side effects } # VGATHERPF1DPS/VGATHERPF1QPS/VGATHERPF1DPD/VGATHERPF1QPD 5-267 PAGE 2091 LINE 107629 # WARNING: did not recognize qualifier /vsib for "VGATHERPF1QPD vm64z {k1}" define pcodeop vgatherpf1qpd_avx512pf ; -:VGATHERPF1QPD m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC7; reg_opcode=2 ... & m64 +:VGATHERPF1QPD m64 XmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xC7; reg_opcode=2 ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vgatherpf1qpd_avx512pf( m64 ); + vgatherpf1qpd_avx512pf( m64, XmmOpMask ); # TODO missing destination or side effects } # VGATHERQPS/VGATHERQPD 5-270 PAGE 2094 LINE 107742 # WARNING: did not recognize qualifier /vsib for "VGATHERQPS xmm1 {k1}, vm64x" define pcodeop vgatherqps_avx512vl ; -:VGATHERQPS XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x93; (XmmReg1 & ZmmReg1) ... & m64 +:VGATHERQPS XmmReg1 XmmOpMask64, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x93; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgatherqps_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vgatherqps_avx512vl( m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGATHERQPS/VGATHERQPD 5-270 PAGE 2094 LINE 107745 # WARNING: did not recognize qualifier /vsib for "VGATHERQPS xmm1 {k1}, vm64y" -:VGATHERQPS XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x93; (XmmReg1 & ZmmReg1) ... & m64 +:VGATHERQPS XmmReg1 XmmOpMask64, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x93; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgatherqps_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vgatherqps_avx512vl( m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGATHERQPS/VGATHERQPD 5-270 PAGE 2094 LINE 107748 # WARNING: did not recognize qualifier /vsib for "VGATHERQPS ymm1 {k1}, vm64z" define pcodeop vgatherqps_avx512f ; -:VGATHERQPS YmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x93; (YmmReg1 & ZmmReg1) ... & m64 +:VGATHERQPS YmmReg1 YmmOpMask64, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x93; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vgatherqps_avx512f( m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vgatherqps_avx512f( m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VGATHERQPS/VGATHERQPD 5-270 PAGE 2094 LINE 107751 # WARNING: did not recognize qualifier /vsib for "VGATHERQPD xmm1 {k1}, vm64x" define pcodeop vgatherqpd_avx512vl ; -:VGATHERQPD XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x93; (XmmReg1 & ZmmReg1) ... & m64 +:VGATHERQPD XmmReg1 XmmOpMask64, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x93; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgatherqpd_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vgatherqpd_avx512vl( m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGATHERQPS/VGATHERQPD 5-270 PAGE 2094 LINE 107753 # WARNING: did not recognize qualifier /vsib for "VGATHERQPD ymm1 {k1}, vm64y" -:VGATHERQPD YmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x93; (YmmReg1 & ZmmReg1) ... & m64 +:VGATHERQPD YmmReg1 YmmOpMask64, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x93; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vgatherqpd_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vgatherqpd_avx512vl( m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VGATHERQPS/VGATHERQPD 5-270 PAGE 2094 LINE 107755 # WARNING: did not recognize qualifier /vsib for "VGATHERQPD zmm1 {k1}, vm64z" define pcodeop vgatherqpd_avx512f ; -:VGATHERQPD ZmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x93; ZmmReg1 ... 
& m64 +:VGATHERQPD ZmmReg1 ZmmOpMask64, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x93; (ZmmReg1 & ZmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vgatherqpd_avx512f( m64 ); + ZmmResult = vgatherqpd_avx512f( m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPGATHERDD/VPGATHERDQ 5-277 PAGE 2101 LINE 108099 # WARNING: did not recognize qualifier /vsib for "VPGATHERDD xmm1 {k1}, vm32x" define pcodeop vpgatherdd_avx512vl ; -:VPGATHERDD XmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x90; (XmmReg1 & ZmmReg1) ... & m32 +:VPGATHERDD XmmReg1 XmmOpMask32, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x90; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpgatherdd_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpgatherdd_avx512vl( m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPGATHERDD/VPGATHERDQ 5-277 PAGE 2101 LINE 108101 # WARNING: did not recognize qualifier /vsib for "VPGATHERDD ymm1 {k1}, vm32y" -:VPGATHERDD YmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x90; (YmmReg1 & ZmmReg1) ... & m32 +:VPGATHERDD YmmReg1 YmmOpMask32, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x90; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpgatherdd_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vpgatherdd_avx512vl( m32 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPGATHERDD/VPGATHERDQ 5-277 PAGE 2101 LINE 108103 # WARNING: did not recognize qualifier /vsib for "VPGATHERDD zmm1 {k1}, vm32z" define pcodeop vpgatherdd_avx512f ; -:VPGATHERDD ZmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x90; ZmmReg1 ... & m32 +:VPGATHERDD ZmmReg1 ZmmOpMask32, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x90; (ZmmReg1 & ZmmOpMask32) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpgatherdd_avx512f( m32 ); + ZmmResult = vpgatherdd_avx512f( m32 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPGATHERDD/VPGATHERDQ 5-277 PAGE 2101 LINE 108105 # WARNING: did not recognize qualifier /vsib for "VPGATHERDQ xmm1 {k1}, vm32x" define pcodeop vpgatherdq_avx512vl ; -:VPGATHERDQ XmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x90; (XmmReg1 & ZmmReg1) ... & m32 +:VPGATHERDQ XmmReg1 XmmOpMask64, m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x90; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpgatherdq_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpgatherdq_avx512vl( m32 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPGATHERDD/VPGATHERDQ 5-277 PAGE 2101 LINE 108107 # WARNING: did not recognize qualifier /vsib for "VPGATHERDQ ymm1 {k1}, vm32x" -:VPGATHERDQ YmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x90; (YmmReg1 & ZmmReg1) ... 
& m32 +:VPGATHERDQ YmmReg1 YmmOpMask64, m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x90; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpgatherdq_avx512vl( m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vpgatherdq_avx512vl( m32 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPGATHERDD/VPGATHERDQ 5-277 PAGE 2101 LINE 108109 # WARNING: did not recognize qualifier /vsib for "VPGATHERDQ zmm1 {k1}, vm32y" define pcodeop vpgatherdq_avx512f ; -:VPGATHERDQ ZmmReg1^KWriteMask, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x90; ZmmReg1 ... & m32 +:VPGATHERDQ ZmmReg1 ZmmOpMask64, m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x90; (ZmmReg1 & ZmmOpMask64) ... & m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpgatherdq_avx512f( m32 ); + ZmmResult = vpgatherdq_avx512f( m32 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPGATHERQD/VPGATHERQQ 5-285 PAGE 2109 LINE 108457 # WARNING: did not recognize qualifier /vsib for "VPGATHERQD xmm1 {k1}, vm64x" define pcodeop vpgatherqd_avx512vl ; -:VPGATHERQD XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x91; (XmmReg1 & ZmmReg1) ... & m64 +:VPGATHERQD XmmReg1 XmmOpMask32, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x91; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpgatherqd_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpgatherqd_avx512vl( m64 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPGATHERQD/VPGATHERQQ 5-285 PAGE 2109 LINE 108459 # WARNING: did not recognize qualifier /vsib for "VPGATHERQD xmm1 {k1}, vm64y" -:VPGATHERQD XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x91; (XmmReg1 & ZmmReg1) ... & m64 +:VPGATHERQD XmmReg1 XmmOpMask32, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x91; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpgatherqd_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpgatherqd_avx512vl( m64 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPGATHERQD/VPGATHERQQ 5-285 PAGE 2109 LINE 108461 # WARNING: did not recognize qualifier /vsib for "VPGATHERQD ymm1 {k1}, vm64z" define pcodeop vpgatherqd_avx512f ; -:VPGATHERQD YmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x91; (YmmReg1 & ZmmReg1) ... & m64 +:VPGATHERQD YmmReg1 YmmOpMask32, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x91; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpgatherqd_avx512f( m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpgatherqd_avx512f( m64 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPGATHERQD/VPGATHERQQ 5-285 PAGE 2109 LINE 108463 # WARNING: did not recognize qualifier /vsib for "VPGATHERQQ xmm1 {k1}, vm64x" define pcodeop vpgatherqq_avx512vl ; -:VPGATHERQQ XmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x91; (XmmReg1 & ZmmReg1) ... & m64 +:VPGATHERQQ XmmReg1 XmmOpMask64, m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x91; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpgatherqq_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpgatherqq_avx512vl( m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPGATHERQD/VPGATHERQQ 5-285 PAGE 2109 LINE 108465 # WARNING: did not recognize qualifier /vsib for "VPGATHERQQ ymm1 {k1}, vm64y" -:VPGATHERQQ YmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x91; (YmmReg1 & ZmmReg1) ... & m64 +:VPGATHERQQ YmmReg1 YmmOpMask64, m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x91; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpgatherqq_avx512vl( m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpgatherqq_avx512vl( m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPGATHERQD/VPGATHERQQ 5-285 PAGE 2109 LINE 108467 # WARNING: did not recognize qualifier /vsib for "VPGATHERQQ zmm1 {k1}, vm64z" define pcodeop vpgatherqq_avx512f ; -:VPGATHERQQ ZmmReg1^KWriteMask, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x91; ZmmReg1 ... 
& m64 +:VPGATHERQQ ZmmReg1 ZmmOpMask64, m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x91; (ZmmReg1 & ZmmOpMask64) ... & m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpgatherqq_avx512f( m64 ); + ZmmResult = vpgatherqq_avx512f( m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VGETEXPPD 5-288 PAGE 2112 LINE 108594 define pcodeop vgetexppd_avx512vl ; -:VGETEXPPD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x42; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VGETEXPPD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x42; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vgetexppd_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vgetexppd_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGETEXPPD 5-288 PAGE 2112 LINE 108598 -:VGETEXPPD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x42; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VGETEXPPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x42; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vgetexppd_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vgetexppd_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VGETEXPPD 5-288 PAGE 2112 LINE 108602 define pcodeop vgetexppd_avx512f ; -:VGETEXPPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x42; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VGETEXPPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x42; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vgetexppd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmResult = vgetexppd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VGETEXPPS 5-291 PAGE 2115 LINE 108760 define pcodeop vgetexpps_avx512vl ; -:VGETEXPPS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x42; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VGETEXPPS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x42; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vgetexpps_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vgetexpps_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VGETEXPPS 5-291 PAGE 2115 LINE 108764 -:VGETEXPPS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x42; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m32bcst +:VGETEXPPS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x42; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vgetexpps_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vgetexpps_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VGETEXPPS 5-291 PAGE 2115 LINE 108768 define pcodeop vgetexpps_avx512f ; -:VGETEXPPS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x42; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VGETEXPPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x42; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vgetexpps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vgetexpps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VGETEXPSD 5-295 PAGE 2119 LINE 108959 define pcodeop vgetexpsd_avx512f ; -:VGETEXPSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x43; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VGETEXPSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x43; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgetexpsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vgetexpsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGETEXPSS 5-297 PAGE 2121 LINE 109037 define pcodeop vgetexpss_avx512f ; -:VGETEXPSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x43; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VGETEXPSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x43; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgetexpss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vgetexpss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VGETMANTPD 5-299 PAGE 2123 LINE 109120 define pcodeop vgetmantpd_avx512vl ; -:VGETMANTPD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x26; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VGETMANTPD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x26; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:16 = vgetmantpd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vgetmantpd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGETMANTPD 5-299 PAGE 2123 LINE 109125 -:VGETMANTPD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x26; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VGETMANTPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x26; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:32 = vgetmantpd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vgetmantpd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VGETMANTPD 5-299 PAGE 2123 LINE 109130 define pcodeop vgetmantpd_avx512f ; -:VGETMANTPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x26; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VGETMANTPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x26; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - ZmmReg1 = vgetmantpd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vgetmantpd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VGETMANTPS 5-303 PAGE 2127 LINE 109339 define pcodeop vgetmantps_avx512vl ; -:VGETMANTPS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x26; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VGETMANTPS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x26; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:16 = vgetmantps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vgetmantps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VGETMANTPS 5-303 PAGE 2127 LINE 109344 -:VGETMANTPS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x26; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VGETMANTPS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x26; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - local tmp:32 = vgetmantps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vgetmantps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VGETMANTPS 5-303 PAGE 2127 LINE 109349 define pcodeop vgetmantps_avx512f ; -:VGETMANTPS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x26; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VGETMANTPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x26; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FVI) { - ZmmReg1 = vgetmantps_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vgetmantps_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VGETMANTSD 5-306 PAGE 2130 LINE 109519 define pcodeop vgetmantsd_avx512f ; -:VGETMANTSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x27; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VGETMANTSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x27; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgetmantsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vgetmantsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VGETMANTSS 5-308 PAGE 2132 LINE 109610 define pcodeop vgetmantss_avx512f ; -:VGETMANTSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x27; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VGETMANTSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x27; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vgetmantss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vgetmantss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VINSERTF128/VINSERTF32x4/VINSERTF64x2/VINSERTF32x8/VINSERTF64x4 5-310 PAGE 2134 LINE 109706 define pcodeop vinsertf32x4_avx512vl ; -:VINSERTF32X4 YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x18; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128; imm8 +:VINSERTF32X4 YmmReg1 YmmOpMask32, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x18; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - local tmp:32 = vinsertf32x4_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vinsertf32x4_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VINSERTF128/VINSERTF32x4/VINSERTF64x2/VINSERTF32x8/VINSERTF64x4 5-310 PAGE 2134 LINE 109709 define pcodeop vinsertf32x4_avx512f ; -:VINSERTF32X4 ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x18; ZmmReg1 ... & XmmReg2_m128; imm8 +:VINSERTF32X4 ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x18; (ZmmReg1 & ZmmOpMask32) ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinsertf32x4_avx512f( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmResult = vinsertf32x4_avx512f( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VINSERTF128/VINSERTF32x4/VINSERTF64x2/VINSERTF32x8/VINSERTF64x4 5-310 PAGE 2134 LINE 109712 define pcodeop vinsertf64x2_avx512vl ; -:VINSERTF64X2 YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x18; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128; imm8 +:VINSERTF64X2 YmmReg1 YmmOpMask64, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x18; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - local tmp:32 = vinsertf64x2_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vinsertf64x2_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VINSERTF128/VINSERTF32x4/VINSERTF64x2/VINSERTF32x8/VINSERTF64x4 5-310 PAGE 2134 LINE 109715 define pcodeop vinsertf64x2_avx512dq ; -:VINSERTF64X2 ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x18; ZmmReg1 ... & XmmReg2_m128; imm8 +:VINSERTF64X2 ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x18; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinsertf64x2_avx512dq( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmResult = vinsertf64x2_avx512dq( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VINSERTF128/VINSERTF32x4/VINSERTF64x2/VINSERTF32x8/VINSERTF64x4 5-310 PAGE 2134 LINE 109718 define pcodeop vinsertf32x8_avx512dq ; -:VINSERTF32X8 ZmmReg1^KWriteMask, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x1A; ZmmReg1 ... & YmmReg2_m256; imm8 +:VINSERTF32X8 ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x1A; (ZmmReg1 & ZmmOpMask32) ... 
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinsertf32x8_avx512dq( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmResult = vinsertf32x8_avx512dq( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VINSERTF128/VINSERTF32x4/VINSERTF64x2/VINSERTF32x8/VINSERTF64x4 5-310 PAGE 2134 LINE 109721 define pcodeop vinsertf64x4_avx512f ; -:VINSERTF64X4 ZmmReg1^KWriteMask, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x1A; ZmmReg1 ... & YmmReg2_m256; imm8 +:VINSERTF64X4 ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x1A; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinsertf64x4_avx512f( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmResult = vinsertf64x4_avx512f( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VINSERTI128/VINSERTI32x4/VINSERTI64x2/VINSERTI32x8/VINSERTI64x4 5-314 PAGE 2138 LINE 109930 define pcodeop vinserti32x4_avx512vl ; -:VINSERTI32X4 YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x38; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128; imm8 +:VINSERTI32X4 YmmReg1 YmmOpMask32, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x38; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - local tmp:32 = vinserti32x4_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vinserti32x4_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VINSERTI128/VINSERTI32x4/VINSERTI64x2/VINSERTI32x8/VINSERTI64x4 5-314 PAGE 2138 LINE 109933 define pcodeop vinserti32x4_avx512f ; -:VINSERTI32X4 ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x38; ZmmReg1 ... & XmmReg2_m128; imm8 +:VINSERTI32X4 ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x38; (ZmmReg1 & ZmmOpMask32) ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinserti32x4_avx512f( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmResult = vinserti32x4_avx512f( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VINSERTI128/VINSERTI32x4/VINSERTI64x2/VINSERTI32x8/VINSERTI64x4 5-314 PAGE 2138 LINE 109936 define pcodeop vinserti64x2_avx512vl ; -:VINSERTI64X2 YmmReg1^KWriteMask, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x38; (YmmReg1 & ZmmReg1) ... & XmmReg2_m128; imm8 +:VINSERTI64X2 YmmReg1 YmmOpMask64, vexVVVV_YmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x38; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - local tmp:32 = vinserti64x2_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vinserti64x2_avx512vl( vexVVVV_YmmReg, XmmReg2_m128, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VINSERTI128/VINSERTI32x4/VINSERTI64x2/VINSERTI32x8/VINSERTI64x4 5-314 PAGE 2138 LINE 109939 define pcodeop vinserti64x2_avx512dq ; -:VINSERTI64X2 ZmmReg1^KWriteMask, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x38; ZmmReg1 ... & XmmReg2_m128; imm8 +:VINSERTI64X2 ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x38; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinserti64x2_avx512dq( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmResult = vinserti64x2_avx512dq( evexV5_ZmmReg, XmmReg2_m128, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VINSERTI128/VINSERTI32x4/VINSERTI64x2/VINSERTI32x8/VINSERTI64x4 5-314 PAGE 2138 LINE 109942 define pcodeop vinserti32x8_avx512dq ; -:VINSERTI32X8 ZmmReg1^KWriteMask, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x3A; ZmmReg1 ... & YmmReg2_m256; imm8 +:VINSERTI32X8 ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x3A; (ZmmReg1 & ZmmOpMask32) ... 
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinserti32x8_avx512dq( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmResult = vinserti32x8_avx512dq( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VINSERTI128/VINSERTI32x4/VINSERTI64x2/VINSERTI32x8/VINSERTI64x4 5-314 PAGE 2138 LINE 109945 define pcodeop vinserti64x4_avx512f ; -:VINSERTI64X4 ZmmReg1^KWriteMask, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x3A; ZmmReg1 ... & YmmReg2_m256; imm8 +:VINSERTI64X4 ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x3A; (ZmmReg1 & ZmmOpMask64) ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 6; ] # (TupleType T2,T4,T8) { - ZmmReg1 = vinserti64x4_avx512f( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmResult = vinserti64x4_avx512f( evexV5_ZmmReg, YmmReg2_m256, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPBLENDMB/VPBLENDMW 5-323 PAGE 2147 LINE 110393 define pcodeop vpblendmb_avx512vl ; -:VPBLENDMB XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x66; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPBLENDMB XmmReg1 XmmOpMask8, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x66; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpblendmb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpblendmb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # VPBLENDMB/VPBLENDMW 5-323 PAGE 2147 LINE 110396 -:VPBLENDMB YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x66; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPBLENDMB YmmReg1 YmmOpMask8, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x66; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpblendmb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpblendmb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # VPBLENDMB/VPBLENDMW 5-323 PAGE 2147 LINE 110399 define pcodeop vpblendmb_avx512bw ; -:VPBLENDMB ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x66; ZmmReg1 ... & ZmmReg2_m512 +:VPBLENDMB ZmmReg1 ZmmOpMask8, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x66; (ZmmReg1 & ZmmOpMask8) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpblendmb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpblendmb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # VPBLENDMB/VPBLENDMW 5-323 PAGE 2147 LINE 110402 define pcodeop vpblendmw_avx512vl ; -:VPBLENDMW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x66; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPBLENDMW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x66; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpblendmw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpblendmw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPBLENDMB/VPBLENDMW 5-323 PAGE 2147 LINE 110405 -:VPBLENDMW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x66; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPBLENDMW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x66; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpblendmw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpblendmw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPBLENDMB/VPBLENDMW 5-323 PAGE 2147 LINE 110408 define pcodeop vpblendmw_avx512bw ; -:VPBLENDMW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x66; ZmmReg1 ... & ZmmReg2_m512 +:VPBLENDMW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x66; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpblendmw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpblendmw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPBLENDMD/VPBLENDMQ 5-325 PAGE 2149 LINE 110495 define pcodeop vpblendmd_avx512vl ; -:VPBLENDMD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x64; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPBLENDMD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x64; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpblendmd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpblendmd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPBLENDMD/VPBLENDMQ 5-325 PAGE 2149 LINE 110498 -:VPBLENDMD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x64; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPBLENDMD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x64; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpblendmd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpblendmd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPBLENDMD/VPBLENDMQ 5-325 PAGE 2149 LINE 110501 define pcodeop vpblendmd_avx512f ; -:VPBLENDMD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x64; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPBLENDMD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x64; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpblendmd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpblendmd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPBLENDMD/VPBLENDMQ 5-325 PAGE 2149 LINE 110504 define pcodeop vpblendmq_avx512vl ; -:VPBLENDMQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x64; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPBLENDMQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x64; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpblendmq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpblendmq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPBLENDMD/VPBLENDMQ 5-325 PAGE 2149 LINE 110507 -:VPBLENDMQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x64; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPBLENDMQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x64; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpblendmq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpblendmq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPBLENDMD/VPBLENDMQ 5-325 PAGE 2149 LINE 110510 define pcodeop vpblendmq_avx512f ; -:VPBLENDMQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x64; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPBLENDMQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x64; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpblendmq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpblendmq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110617 # WARNING: did not recognize operand "reg" (encoding ModRM:r/m (r)) for "VPBROADCASTB xmm1 {k1}{z}, reg" +#TODO: fix define pcodeop vpbroadcastb_avx512vl ; -:VPBROADCASTB XmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7A; (XmmReg1 & ZmmReg1) +:VPBROADCASTB XmmReg1 XmmOpMask8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7A; (XmmReg1 & ZmmReg1 & XmmOpMask8) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { local tmp:16 = vpbroadcastb_avx512vl( ); + build XmmOpMask8; ZmmReg1 = zext(tmp); } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110619 # WARNING: did not recognize operand "reg" (encoding ModRM:r/m (r)) for "VPBROADCASTB ymm1 {k1}{z}, reg" -:VPBROADCASTB YmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & 
$(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7A; (YmmReg1 & ZmmReg1) +:VPBROADCASTB YmmReg1 YmmOpMask8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7A; (YmmReg1 & ZmmReg1 & YmmOpMask8) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpbroadcastb_avx512vl( ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastb_avx512vl( ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110621 # WARNING: did not recognize operand "reg" (encoding ModRM:r/m (r)) for "VPBROADCASTB zmm1 {k1}{z}, reg" define pcodeop vpbroadcastb_avx512bw ; -:VPBROADCASTB ZmmReg1 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7A; ZmmReg1 +:VPBROADCASTB ZmmReg1 ZmmOpMask8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7A; ZmmReg1 & ZmmOpMask8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpbroadcastb_avx512bw( ); + ZmmResult = vpbroadcastb_avx512bw( ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110623 # WARNING: did not recognize operand "reg" (encoding ModRM:r/m (r)) for "VPBROADCASTW xmm1 {k1}{z}, reg" define pcodeop vpbroadcastw_avx512vl ; -:VPBROADCASTW XmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7B; (XmmReg1 & ZmmReg1) +:VPBROADCASTW XmmReg1 XmmOpMask16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7B; (XmmReg1 & ZmmReg1 & XmmOpMask16) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpbroadcastw_avx512vl( ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastw_avx512vl( ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110625 # WARNING: did not recognize operand "reg" (encoding ModRM:r/m (r)) for "VPBROADCASTW ymm1 
{k1}{z}, reg" -:VPBROADCASTW YmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7B; (YmmReg1 & ZmmReg1) +:VPBROADCASTW YmmReg1 YmmOpMask16 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7B; (YmmReg1 & ZmmReg1 & YmmOpMask16) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpbroadcastw_avx512vl( ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastw_avx512vl( ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110627 # WARNING: did not recognize operand "reg" (encoding ModRM:r/m (r)) for "VPBROADCASTW zmm1 {k1}{z}, reg" define pcodeop vpbroadcastw_avx512bw ; -:VPBROADCASTW ZmmReg1 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7B; ZmmReg1 +:VPBROADCASTW ZmmReg1 ZmmOpMask16 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7B; ZmmReg1 & ZmmOpMask16 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpbroadcastw_avx512bw( ); + ZmmResult = vpbroadcastw_avx512bw( ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110629 # WARNING: did not recognize operand "r32" (encoding ModRM:r/m (r)) for "VPBROADCASTD xmm1 {k1}{z}, r32" define pcodeop vpbroadcastd_avx512vl ; -:VPBROADCASTD XmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7C; (XmmReg1 & ZmmReg1) +:VPBROADCASTD XmmReg1 XmmOpMask32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7C; (XmmReg1 & ZmmReg1 & XmmOpMask32) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpbroadcastd_avx512vl( ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastd_avx512vl( ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 
110631 # WARNING: did not recognize operand "r32" (encoding ModRM:r/m (r)) for "VPBROADCASTD ymm1 {k1}{z}, r32" -:VPBROADCASTD YmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7C; (YmmReg1 & ZmmReg1) +:VPBROADCASTD YmmReg1 YmmOpMask32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7C; (YmmReg1 & ZmmReg1 & YmmOpMask32) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpbroadcastd_avx512vl( ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastd_avx512vl( ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110633 # WARNING: did not recognize operand "r32" (encoding ModRM:r/m (r)) for "VPBROADCASTD zmm1 {k1}{z}, r32" define pcodeop vpbroadcastd_avx512f ; -:VPBROADCASTD ZmmReg1 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x7C; ZmmReg1 +:VPBROADCASTD ZmmReg1 ZmmOpMask32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x7C; ZmmReg1 & ZmmOpMask32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpbroadcastd_avx512f( ); + ZmmResult = vpbroadcastd_avx512f( ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110635 # WARNING: did not recognize operand "r64" (encoding ModRM:r/m (r)) for "VPBROADCASTQ xmm1 {k1}{z}, r64" define pcodeop vpbroadcastq_avx512vl ; @ifdef IA64 -:VPBROADCASTQ XmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x7C; (XmmReg1 & ZmmReg1) +:VPBROADCASTQ XmmReg1 XmmOpMask64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1); byte=0x7C; (XmmReg1 & ZmmReg1 & XmmOpMask64) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpbroadcastq_avx512vl( ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastq_avx512vl( ); + 
XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } @endif # VPBROADCASTB/W/D/Q 5-328 PAGE 2152 LINE 110637 # WARNING: did not recognize operand "r64" (encoding ModRM:r/m (r)) for "VPBROADCASTQ ymm1 {k1}{z}, r64" @ifdef IA64 -:VPBROADCASTQ YmmReg1 KWriteMask is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x7C; (YmmReg1 & ZmmReg1) +:VPBROADCASTQ YmmReg1 YmmOpMask64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1); byte=0x7C; (YmmReg1 & ZmmReg1 & YmmOpMask64) [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpbroadcastq_avx512vl( ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastq_avx512vl( ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } @endif @@ -8361,914 +10272,1197 @@ define pcodeop vpbroadcastq_avx512vl ; # WARNING: did not recognize operand "r64" (encoding ModRM:r/m (r)) for "VPBROADCASTQ zmm1 {k1}{z}, r64" define pcodeop vpbroadcastq_avx512f ; @ifdef IA64 -:VPBROADCASTQ ZmmReg1 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x7C; ZmmReg1 +:VPBROADCASTQ ZmmReg1 ZmmOpMask64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1); byte=0x7C; ZmmReg1 & ZmmOpMask64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpbroadcastq_avx512f( ); + ZmmResult = vpbroadcastq_avx512f( ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } @endif # VPBROADCAST 5-331 PAGE 2155 LINE 110780 -:VPBROADCASTB XmmReg1^KWriteMask, XmmReg2_m8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x78; (XmmReg1 & ZmmReg1) ... & XmmReg2_m8 +:VPBROADCASTB XmmReg1 XmmOpMask8, XmmReg2_m8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x78; (XmmReg1 & ZmmReg1 & XmmOpMask8) ... 
& XmmReg2_m8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:16 = vpbroadcastb_avx512vl( XmmReg2_m8 ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastb_avx512vl( XmmReg2_m8 ); + XmmMask = XmmReg1; + build XmmOpMask8; + ZmmReg1 = zext(XmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110782 -:VPBROADCASTB YmmReg1^KWriteMask, XmmReg2_m8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x78; (YmmReg1 & ZmmReg1) ... & XmmReg2_m8 +:VPBROADCASTB YmmReg1 YmmOpMask8, XmmReg2_m8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x78; (YmmReg1 & ZmmReg1 & YmmOpMask8) ... & XmmReg2_m8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vpbroadcastb_avx512vl( XmmReg2_m8 ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastb_avx512vl( XmmReg2_m8 ); + YmmMask = YmmReg1; + build YmmOpMask8; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110784 -:VPBROADCASTB ZmmReg1^KWriteMask, XmmReg2_m8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x78; ZmmReg1 ... & XmmReg2_m8 +:VPBROADCASTB ZmmReg1 ZmmOpMask8, XmmReg2_m8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x78; (ZmmReg1 & ZmmOpMask8) ... & XmmReg2_m8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vpbroadcastb_avx512bw( XmmReg2_m8 ); + ZmmResult = vpbroadcastb_avx512bw( XmmReg2_m8 ); + ZmmMask = ZmmReg1; + build ZmmOpMask8; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-331 PAGE 2155 LINE 110791 -:VPBROADCASTW XmmReg1^KWriteMask, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x79; (XmmReg1 & ZmmReg1) ... & XmmReg2_m16 +:VPBROADCASTW XmmReg1 XmmOpMask16, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x79; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m16 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:16 = vpbroadcastw_avx512vl( XmmReg2_m16 ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastw_avx512vl( XmmReg2_m16 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110794 -:VPBROADCASTW YmmReg1^KWriteMask, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x79; (YmmReg1 & ZmmReg1) ... & XmmReg2_m16 +:VPBROADCASTW YmmReg1 YmmOpMask16, XmmReg2_m16 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x79; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & XmmReg2_m16 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vpbroadcastw_avx512vl( XmmReg2_m16 ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastw_avx512vl( XmmReg2_m16 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110797 -:VPBROADCASTW ZmmReg1^KWriteMask, XmmReg2_m16 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x79; ZmmReg1 ... & XmmReg2_m16 +:VPBROADCASTW ZmmReg1 ZmmOpMask16, XmmReg2_m16 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x79; (ZmmReg1 & ZmmOpMask16) ... & XmmReg2_m16 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vpbroadcastw_avx512bw( XmmReg2_m16 ); + ZmmResult = vpbroadcastw_avx512bw( XmmReg2_m16 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-331 PAGE 2155 LINE 110804 -:VPBROADCASTD XmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x58; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPBROADCASTD XmmReg1 XmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x58; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:16 = vpbroadcastd_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastd_avx512vl( XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110807 -:VPBROADCASTD YmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x58; (YmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VPBROADCASTD YmmReg1 YmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x58; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vpbroadcastd_avx512vl( XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastd_avx512vl( XmmReg2_m32 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110810 -:VPBROADCASTD ZmmReg1^KWriteMask, XmmReg2_m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x58; ZmmReg1 ... & XmmReg2_m32 +:VPBROADCASTD ZmmReg1 ZmmOpMask32, XmmReg2_m32 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x58; (ZmmReg1 & ZmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vpbroadcastd_avx512f( XmmReg2_m32 ); + ZmmResult = vpbroadcastd_avx512f( XmmReg2_m32 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-331 PAGE 2155 LINE 110817 -:VPBROADCASTQ XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x59; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPBROADCASTQ XmmReg1 XmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x59; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:16 = vpbroadcastq_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vpbroadcastq_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110819 -:VPBROADCASTQ YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x59; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VPBROADCASTQ YmmReg1 YmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x59; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vpbroadcastq_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vpbroadcastq_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-331 PAGE 2155 LINE 110821 -:VPBROADCASTQ ZmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x59; ZmmReg1 ... & XmmReg2_m64 +:VPBROADCASTQ ZmmReg1 ZmmOpMask64, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x59; (ZmmReg1 & ZmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vpbroadcastq_avx512f( XmmReg2_m64 ); + ZmmResult = vpbroadcastq_avx512f( XmmReg2_m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-331 PAGE 2155 LINE 110823 define pcodeop vbroadcasti32x2_avx512vl ; -:VBROADCASTI32x2 XmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x59; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m64 +:VBROADCASTI32x2 XmmReg1 XmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x59; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:16 = vbroadcasti32x2_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vbroadcasti32x2_avx512vl( XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPBROADCAST 5-332 PAGE 2156 LINE 110837 -:VBROADCASTI32x2 YmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x59; (YmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VBROADCASTI32x2 YmmReg1 YmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x59; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcasti32x2_avx512vl( XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcasti32x2_avx512vl( XmmReg2_m64 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-332 PAGE 2156 LINE 110840 define pcodeop vbroadcasti32x2_avx512dq ; -:VBROADCASTI32x2 ZmmReg1^KWriteMask, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x59; ZmmReg1 ... & XmmReg2_m64 +:VBROADCASTI32x2 ZmmReg1 ZmmOpMask32, XmmReg2_m64 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x59; (ZmmReg1 & ZmmOpMask32) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcasti32x2_avx512dq( XmmReg2_m64 ); + ZmmResult = vbroadcasti32x2_avx512dq( XmmReg2_m64 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-332 PAGE 2156 LINE 110845 define pcodeop vbroadcasti32x4_avx512vl ; -:VBROADCASTI32X4 YmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x5A; (YmmReg1 & ZmmReg1) ... & m128 +:VBROADCASTI32X4 YmmReg1 YmmOpMask32, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x5A; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcasti32x4_avx512vl( m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcasti32x4_avx512vl( m128 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-332 PAGE 2156 LINE 110848 define pcodeop vbroadcasti32x4_avx512f ; -:VBROADCASTI32X4 ZmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x5A; ZmmReg1 ... & m128 +:VBROADCASTI32X4 ZmmReg1 ZmmOpMask32, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x5A; (ZmmReg1 & ZmmOpMask32) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcasti32x4_avx512f( m128 ); + ZmmResult = vbroadcasti32x4_avx512f( m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-332 PAGE 2156 LINE 110851 define pcodeop vbroadcasti64x2_avx512vl ; -:VBROADCASTI64X2 YmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x5A; (YmmReg1 & ZmmReg1) ... & m128 +:VBROADCASTI64X2 YmmReg1 YmmOpMask64, m128 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x5A; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - local tmp:32 = vbroadcasti64x2_avx512vl( m128 ); - ZmmReg1 = zext(tmp); + YmmResult = vbroadcasti64x2_avx512vl( m128 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPBROADCAST 5-332 PAGE 2156 LINE 110854 define pcodeop vbroadcasti64x2_avx512dq ; -:VBROADCASTI64X2 ZmmReg1^KWriteMask, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x5A; ZmmReg1 ... & m128 +:VBROADCASTI64X2 ZmmReg1 ZmmOpMask64, m128 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x5A; (ZmmReg1 & ZmmOpMask64) ... & m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcasti64x2_avx512dq( m128 ); + ZmmResult = vbroadcasti64x2_avx512dq( m128 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-332 PAGE 2156 LINE 110857 define pcodeop vbroadcasti32x8_avx512dq ; -:VBROADCASTI32X8 ZmmReg1^KWriteMask, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x5B; ZmmReg1 ... & m256 +:VBROADCASTI32X8 ZmmReg1 ZmmOpMask32, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x5B; (ZmmReg1 & ZmmOpMask32) ... & m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcasti32x8_avx512dq( m256 ); + ZmmResult = vbroadcasti32x8_avx512dq( m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPBROADCAST 5-332 PAGE 2156 LINE 110860 define pcodeop vbroadcasti64x4_avx512f ; -:VBROADCASTI64X4 ZmmReg1^KWriteMask, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x5B; ZmmReg1 ... & m256 +:VBROADCASTI64X4 ZmmReg1 ZmmOpMask64, m256 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x5B; (ZmmReg1 & ZmmOpMask64) ... 
& m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S,T2,T4,T8) { - ZmmReg1 = vbroadcasti64x4_avx512f( m256 ); + ZmmResult = vbroadcasti64x4_avx512f( m256 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPCMPB/VPCMPUB 5-339 PAGE 2163 LINE 111259 define pcodeop vpcmpb_avx512vl ; -:VPCMPB KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x3F; KReg_reg ... & XmmReg2_m128 +:VPCMPB KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x3F; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPCMPB/VPCMPUB 5-339 PAGE 2163 LINE 111263 -:VPCMPB KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x3F; KReg_reg ... & YmmReg2_m256 +:VPCMPB KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x3F; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPCMPB/VPCMPUB 5-339 PAGE 2163 LINE 111267 define pcodeop vpcmpb_avx512bw ; -:VPCMPB KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x3F; KReg_reg ... 
& ZmmReg2_m512 +:VPCMPB KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x3F; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,64]) & tmp; } # VPCMPB/VPCMPUB 5-339 PAGE 2163 LINE 111271 define pcodeop vpcmpub_avx512vl ; -:VPCMPUB KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x3E; KReg_reg ... & XmmReg2_m128 +:VPCMPUB KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x3E; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpub_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpub_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPCMPB/VPCMPUB 5-339 PAGE 2163 LINE 111275 -:VPCMPUB KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x3E; KReg_reg ... & YmmReg2_m256 +:VPCMPUB KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x3E; KReg_reg ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpub_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpub_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPCMPB/VPCMPUB 5-339 PAGE 2163 LINE 111279 define pcodeop vpcmpub_avx512bw ; -:VPCMPUB KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x3E; KReg_reg ... & ZmmReg2_m512 +:VPCMPUB KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x3E; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpub_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpub_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,64]) & tmp; + } # VPCMPD/VPCMPUD 5-342 PAGE 2166 LINE 111422 define pcodeop vpcmpd_avx512vl ; -:VPCMPD KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x1F; KReg_reg ... & XmmReg2_m128_m32bcst; imm8 +:VPCMPD KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x1F; KReg_reg ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + local tmp = vpcmpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPCMPD/VPCMPUD 5-342 PAGE 2166 LINE 111426 -:VPCMPD KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x1F; KReg_reg ... 
& YmmReg2_m256_m32bcst; imm8 +:VPCMPD KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x1F; KReg_reg ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + local tmp = vpcmpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPCMPD/VPCMPUD 5-342 PAGE 2166 LINE 111430 define pcodeop vpcmpd_avx512f ; -:VPCMPD KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x1F; KReg_reg ... & ZmmReg2_m512_m32bcst; imm8 +:VPCMPD KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x1F; KReg_reg ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + local tmp = vpcmpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPCMPD/VPCMPUD 5-342 PAGE 2166 LINE 111434 define pcodeop vpcmpud_avx512vl ; -:VPCMPUD KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x1E; KReg_reg ... & XmmReg2_m128_m32bcst; imm8 +:VPCMPUD KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x1E; KReg_reg ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpud_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + local tmp = vpcmpud_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPCMPD/VPCMPUD 5-342 PAGE 2166 LINE 111438 -:VPCMPUD KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x1E; KReg_reg ... & YmmReg2_m256_m32bcst; imm8 +:VPCMPUD KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x1E; KReg_reg ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpud_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + local tmp = vpcmpud_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPCMPD/VPCMPUD 5-342 PAGE 2166 LINE 111442 define pcodeop vpcmpud_avx512f ; -:VPCMPUD KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x1E; KReg_reg ... & ZmmReg2_m512_m32bcst; imm8 +:VPCMPUD KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x1E; KReg_reg ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpud_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + local tmp = vpcmpud_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPCMPQ/VPCMPUQ 5-345 PAGE 2169 LINE 111573 define pcodeop vpcmpq_avx512vl ; -:VPCMPQ KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x1F; KReg_reg ... & XmmReg2_m128_m64bcst; imm8 +:VPCMPQ KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x1F; KReg_reg ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + local tmp = vpcmpq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # VPCMPQ/VPCMPUQ 5-345 PAGE 2169 LINE 111577 -:VPCMPQ KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x1F; KReg_reg ... & YmmReg2_m256_m64bcst; imm8 +:VPCMPQ KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x1F; KReg_reg ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + local tmp = vpcmpq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPCMPQ/VPCMPUQ 5-345 PAGE 2169 LINE 111581 define pcodeop vpcmpq_avx512f ; -:VPCMPQ KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x1F; KReg_reg ... & ZmmReg2_m512_m64bcst; imm8 +:VPCMPQ KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x1F; KReg_reg ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + local tmp = vpcmpq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPCMPQ/VPCMPUQ 5-345 PAGE 2169 LINE 111585 define pcodeop vpcmpuq_avx512vl ; -:VPCMPUQ KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x1E; KReg_reg ... & XmmReg2_m128_m64bcst; imm8 +:VPCMPUQ KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x1E; KReg_reg ... 
& XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpuq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + local tmp = vpcmpuq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # VPCMPQ/VPCMPUQ 5-345 PAGE 2169 LINE 111589 -:VPCMPUQ KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x1E; KReg_reg ... & YmmReg2_m256_m64bcst; imm8 +:VPCMPUQ KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x1E; KReg_reg ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpuq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + local tmp = vpcmpuq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPCMPQ/VPCMPUQ 5-345 PAGE 2169 LINE 111593 define pcodeop vpcmpuq_avx512f ; -:VPCMPUQ KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x1E; KReg_reg ... & ZmmReg2_m512_m64bcst; imm8 +:VPCMPUQ KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x1E; KReg_reg ... 
& ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vpcmpuq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + local tmp = vpcmpuq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPCMPW/VPCMPUW 5-348 PAGE 2172 LINE 111724 define pcodeop vpcmpw_avx512vl ; -:VPCMPW KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x3F; KReg_reg ... & XmmReg2_m128 +:VPCMPW KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x3F; KReg_reg ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPCMPW/VPCMPUW 5-348 PAGE 2172 LINE 111728 -:VPCMPW KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x3F; KReg_reg ... & YmmReg2_m256 +:VPCMPW KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x3F; KReg_reg ... & YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPCMPW/VPCMPUW 5-348 PAGE 2172 LINE 111732 define pcodeop vpcmpw_avx512bw ; -:VPCMPW KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x3F; KReg_reg ...
& ZmmReg2_m512 +:VPCMPW KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x3F; KReg_reg ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPCMPW/VPCMPUW 5-348 PAGE 2172 LINE 111736 define pcodeop vpcmpuw_avx512vl ; -:VPCMPUW KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x3E; KReg_reg ... & XmmReg2_m128 +:VPCMPUW KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x3E; KReg_reg ... & XmmReg2_m128; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vpcmpuw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPCMPW/VPCMPUW 5-348 PAGE 2172 LINE 111740 -:VPCMPUW KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x3E; KReg_reg ... & YmmReg2_m256 +:VPCMPUW KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x3E; KReg_reg ...
& YmmReg2_m256; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vpcmpuw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPCMPW/VPCMPUW 5-348 PAGE 2172 LINE 111745 define pcodeop vpcmpuw_avx512bw ; -:VPCMPUW KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x3E; KReg_reg ... & ZmmReg2_m512 +:VPCMPUW KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x3E; KReg_reg ... & ZmmReg2_m512; imm8 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vpcmpuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vpcmpuw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512, imm8:1 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPCOMPRESSD 5-351 PAGE 2175 LINE 111873 define pcodeop vpcompressd_avx512vl ; -:VPCOMPRESSD XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x8B; XmmReg1 ... & XmmReg2_m128 +:VPCOMPRESSD XmmReg2 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x8B; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - XmmReg2_m128 = vpcompressd_avx512vl( XmmReg1 ); + XmmResult = vpcompressd_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); +} + +:VPCOMPRESSD m128 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x8B; XmmReg1 ...
& m128 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + XmmResult = vpcompressd_avx512vl( XmmReg1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VPCOMPRESSD 5-351 PAGE 2175 LINE 111875 -:VPCOMPRESSD YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x8B; YmmReg1 ... & YmmReg2_m256 +:VPCOMPRESSD YmmReg2 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x8B; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - YmmReg2_m256 = vpcompressd_avx512vl( YmmReg1 ); + YmmResult = vpcompressd_avx512vl( YmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); +} + +:VPCOMPRESSD m256 YmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x8B; YmmReg1 ... & m256 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + YmmResult = vpcompressd_avx512vl( YmmReg1 ); + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # VPCOMPRESSD 5-351 PAGE 2175 LINE 111877 define pcodeop vpcompressd_avx512f ; -:VPCOMPRESSD ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x8B; ZmmReg1 ... & ZmmReg2_m512 +:VPCOMPRESSD ZmmReg2_m512 ZmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask32; byte=0x8B; ZmmReg1 ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg2_m512 = vpcompressd_avx512f( ZmmReg1 ); + ZmmResult = vpcompressd_avx512f( ZmmReg1 ); + ZmmMask = ZmmReg2_m512; + build ZmmOpMask32; + ZmmReg2_m512 = ZmmResult; } # VPCOMPRESSQ 5-353 PAGE 2177 LINE 111970 define pcodeop vpcompressq_avx512vl ; -:VPCOMPRESSQ XmmReg2_m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x8B; XmmReg1 ... 
& XmmReg2_m128 +:VPCOMPRESSQ XmmReg2 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask64; byte=0x8B; XmmReg1 & mod=3 & XmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - XmmReg2_m128 = vpcompressq_avx512vl( XmmReg1 ); + XmmResult = vpcompressq_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask64; + ZmmReg2 = zext(XmmResult); +} + +:VPCOMPRESSQ m128 XmmOpMask64, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask64; byte=0x8B; XmmReg1 ... & m128 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + XmmResult = vpcompressq_avx512vl( XmmReg1 ); + XmmMask = m128; + build XmmOpMask64; + m128 = XmmResult; } # VPCOMPRESSQ 5-353 PAGE 2177 LINE 111972 -:VPCOMPRESSQ YmmReg2_m256^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x8B; YmmReg1 ... & YmmReg2_m256 +:VPCOMPRESSQ YmmReg2 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask64; byte=0x8B; YmmReg1 & mod=3 & YmmReg2 & ZmmReg2 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - YmmReg2_m256 = vpcompressq_avx512vl( YmmReg1 ); + YmmResult = vpcompressq_avx512vl( YmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask64; + ZmmReg2 = zext(YmmResult); +} + +:VPCOMPRESSQ m256 YmmOpMask64, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask64; byte=0x8B; YmmReg1 ... & m256 +[ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) +{ + YmmResult = vpcompressq_avx512vl( YmmReg1 ); + YmmMask = m256; + build YmmOpMask64; + m256 = YmmResult; } # VPCOMPRESSQ 5-353 PAGE 2177 LINE 111974 define pcodeop vpcompressq_avx512f ; -:VPCOMPRESSQ ZmmReg2_m512^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x8B; ZmmReg1 ...
& ZmmReg2_m512 +:VPCOMPRESSQ ZmmReg2_m512 ZmmOpMask64, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask64; byte=0x8B; ZmmReg1 ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg2_m512 = vpcompressq_avx512f( ZmmReg1 ); + ZmmResult = vpcompressq_avx512f( ZmmReg1 ); + ZmmMask = ZmmReg2_m512; + build ZmmOpMask64; + ZmmReg2_m512 = ZmmResult; } # VPCONFLICTD/Q 5-355 PAGE 2179 LINE 112068 define pcodeop vpconflictd_avx512vl ; -:VPCONFLICTD XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC4; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPCONFLICTD XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0xC4; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpconflictd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpconflictd_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPCONFLICTD/Q 5-355 PAGE 2179 LINE 112072 -:VPCONFLICTD YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC4; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPCONFLICTD YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0xC4; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpconflictd_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpconflictd_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPCONFLICTD/Q 5-355 PAGE 2179 LINE 112076 define pcodeop vpconflictd_avx512cd ; -:VPCONFLICTD ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC4; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPCONFLICTD ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0xC4; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpconflictd_avx512cd( ZmmReg2_m512_m32bcst ); + ZmmResult = vpconflictd_avx512cd( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPCONFLICTD/Q 5-355 PAGE 2179 LINE 112080 define pcodeop vpconflictq_avx512vl ; -:VPCONFLICTQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC4; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPCONFLICTQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0xC4; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpconflictq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpconflictq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPCONFLICTD/Q 5-355 PAGE 2179 LINE 112084 -:VPCONFLICTQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC4; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m64bcst +:VPCONFLICTQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0xC4; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpconflictq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpconflictq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPCONFLICTD/Q 5-355 PAGE 2179 LINE 112088 define pcodeop vpconflictq_avx512cd ; -:VPCONFLICTQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC4; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPCONFLICTQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0xC4; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpconflictq_avx512cd( ZmmReg2_m512_m64bcst ); + ZmmResult = vpconflictq_avx512cd( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMD/VPERMW 5-362 PAGE 2186 LINE 112407 define pcodeop vpermd_avx512vl ; -:VPERMD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x36; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPERMD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x36; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpermd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPERMD/VPERMW 5-362 PAGE 2186 LINE 112410 define pcodeop vpermd_avx512f ; -:VPERMD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x36; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPERMD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x36; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpermd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpermd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPERMD/VPERMW 5-362 PAGE 2186 LINE 112413 define pcodeop vpermw_avx512vl ; -:VPERMW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x8D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPERMW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x8D; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpermw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpermw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPERMD/VPERMW 5-362 PAGE 2186 LINE 112417 -:VPERMW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x8D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPERMW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x8D; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpermw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpermw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPERMD/VPERMW 5-362 PAGE 2186 LINE 112421 define pcodeop vpermw_avx512bw ; -:VPERMW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x8D; ZmmReg1 ... & ZmmReg2_m512 +:VPERMW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x8D; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpermw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpermw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112553 define pcodeop vpermi2w_avx512vl ; -:VPERMI2W XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x75; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPERMI2W XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x75; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpermi2w_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpermi2w_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112556 -:VPERMI2W YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x75; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPERMI2W YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x75; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpermi2w_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpermi2w_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112559 define pcodeop vpermi2w_avx512bw ; -:VPERMI2W ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x75; ZmmReg1 ... & ZmmReg2_m512 +:VPERMI2W ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x75; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpermi2w_avx512bw( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpermi2w_avx512bw( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112562 define pcodeop vpermi2d_avx512vl ; -:VPERMI2D XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x76; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPERMI2D XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x76; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpermi2d_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpermi2d_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112566 -:VPERMI2D YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x76; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPERMI2D YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x76; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpermi2d_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermi2d_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112570 define pcodeop vpermi2d_avx512f ; -:VPERMI2D ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x76; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPERMI2D ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x76; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpermi2d_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpermi2d_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112574 define pcodeop vpermi2q_avx512vl ; -:VPERMI2Q XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x76; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPERMI2Q XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x76; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpermi2q_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpermi2q_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112578 -:VPERMI2Q YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x76; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPERMI2Q YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x76; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpermi2q_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermi2q_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112582 define pcodeop vpermi2q_avx512f ; -:VPERMI2Q ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x76; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPERMI2Q ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x76; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpermi2q_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpermi2q_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112586 define pcodeop vpermi2ps_avx512vl ; -:VPERMI2PS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x77; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPERMI2PS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x77; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpermi2ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpermi2ps_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112590 -:VPERMI2PS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x77; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPERMI2PS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x77; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpermi2ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermi2ps_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPERMI2W/D/Q/PS/PD 5-365 PAGE 2189 LINE 112594 define pcodeop vpermi2ps_avx512f ; -:VPERMI2PS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x77; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPERMI2PS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x77; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpermi2ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpermi2ps_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPERMI2W/D/Q/PS/PD 5-366 PAGE 2190 LINE 112610 define pcodeop vpermi2pd_avx512vl ; -:VPERMI2PD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x77; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPERMI2PD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x77; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpermi2pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpermi2pd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPERMI2W/D/Q/PS/PD 5-366 PAGE 2190 LINE 112614 -:VPERMI2PD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x77; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPERMI2PD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x77; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpermi2pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermi2pd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMI2W/D/Q/PS/PD 5-366 PAGE 2190 LINE 112618 define pcodeop vpermi2pd_avx512f ; -:VPERMI2PD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x77; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPERMI2PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x77; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpermi2pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpermi2pd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMILPD 5-371 PAGE 2195 LINE 112866 define pcodeop vpermilpd_avx512vl ; -:VPERMILPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x0D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPERMILPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x0D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:16 = vpermilpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpermilpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPERMILPD 5-371 PAGE 2195 LINE 112869 -:VPERMILPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x0D; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPERMILPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x0D; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vpermilpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermilpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMILPD 5-371 PAGE 2195 LINE 112872 define pcodeop vpermilpd_avx512f ; -:VPERMILPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x0D; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPERMILPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x0D; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vpermilpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpermilpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMILPD 5-371 PAGE 2195 LINE 112879 -:VPERMILPD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x05; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VPERMILPD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x05; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RM) { - local tmp:16 = vpermilpd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpermilpd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPERMILPD 5-371 PAGE 2195 LINE 112882 -:VPERMILPD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x05; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VPERMILPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x05; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RM) { - local tmp:32 = vpermilpd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpermilpd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMILPD 5-371 PAGE 2195 LINE 112885 -:VPERMILPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x05; ZmmReg1 ... 
& ZmmReg2_m512_m64bcst; imm8 +:VPERMILPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x05; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RM) { - ZmmReg1 = vpermilpd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpermilpd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMILPS 5-376 PAGE 2200 LINE 113170 define pcodeop vpermilps_avx512vl ; -:VPERMILPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x0C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPERMILPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x0C; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:16 = vpermilps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpermilps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPERMILPS 5-376 PAGE 2200 LINE 113173 -:VPERMILPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x0C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPERMILPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x0C; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vpermilps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermilps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPERMILPS 5-376 PAGE 2200 LINE 113176 define pcodeop vpermilps_avx512f ; -:VPERMILPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x0C; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPERMILPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x0C; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vpermilps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpermilps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPERMILPS 5-376 PAGE 2200 LINE 113179 -:VPERMILPS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x04; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VPERMILPS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x04; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RM) { - local tmp:16 = vpermilps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpermilps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPERMILPS 5-376 PAGE 2200 LINE 113182 -:VPERMILPS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x04; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VPERMILPS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x04; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RM) { - local tmp:32 = vpermilps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpermilps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPERMILPS 5-376 PAGE 2200 LINE 113186 -:VPERMILPS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x04; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VPERMILPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x04; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RM) { - ZmmReg1 = vpermilps_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vpermilps_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPERMPD 5-381 PAGE 2205 LINE 113456 define pcodeop vpermpd_avx512vl ; -:VPERMPD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x01; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VPERMPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x01; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RMI) { - local tmp:32 = vpermpd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpermpd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMPD 5-381 PAGE 2205 LINE 113459 define pcodeop vpermpd_avx512f ; -:VPERMPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x01; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VPERMPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x01; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RMI) { - ZmmReg1 = vpermpd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpermpd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMPD 5-381 PAGE 2205 LINE 113462 -:VPERMPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x16; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPERMPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x16; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vpermpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMPD 5-381 PAGE 2205 LINE 113465 -:VPERMPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x16; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPERMPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x16; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vpermpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpermpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMPS 5-384 PAGE 2208 LINE 113636 define pcodeop vpermps_avx512vl ; -:VPERMPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x16; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPERMPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x16; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpermps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPERMPS 5-384 PAGE 2208 LINE 113639 define pcodeop vpermps_avx512f ; -:VPERMPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x16; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPERMPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x16; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpermps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpermps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPERMQ 5-387 PAGE 2211 LINE 113771 define pcodeop vpermq_avx512vl ; -:VPERMQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x00; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VPERMQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x00; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RMI) { - local tmp:32 = vpermq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpermq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMQ 5-387 PAGE 2211 LINE 113774 define pcodeop vpermq_avx512f ; -:VPERMQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x00; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VPERMQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x00; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RMI) { - ZmmReg1 = vpermq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpermq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPERMQ 5-387 PAGE 2211 LINE 113777 -:VPERMQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x36; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPERMQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x36; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vpermq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpermq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPERMQ 5-387 PAGE 2211 LINE 113780 -:VPERMQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x36; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPERMQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x36; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vpermq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpermq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; +} + +define pcodeop vpermt2pd_avx512f; +:VPERMT2PD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x7F; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst { + ZmmResult = vpermt2pd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPEXPANDD 5-390 PAGE 2214 LINE 113945 define pcodeop vpexpandd_avx512vl ; -:VPEXPANDD XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x89; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPEXPANDD XmmReg1 XmmOpMask32, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x89; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpexpandd_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpexpandd_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPEXPANDD 5-390 PAGE 2214 LINE 113948 -:VPEXPANDD YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x89; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPEXPANDD YmmReg1 YmmOpMask32, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x89; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpexpandd_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpexpandd_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPEXPANDD 5-390 PAGE 2214 LINE 113951 define pcodeop vpexpandd_avx512f ; -:VPEXPANDD ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x89; ZmmReg1 ... & ZmmReg2_m512 +:VPEXPANDD ZmmReg1 ZmmOpMask32, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x89; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpexpandd_avx512f( ZmmReg2_m512 ); + ZmmResult = vpexpandd_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPEXPANDQ 5-392 PAGE 2216 LINE 114033 define pcodeop vpexpandq_avx512vl ; -:VPEXPANDQ XmmReg1^KWriteMask, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x89; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPEXPANDQ XmmReg1 XmmOpMask64, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x89; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vpexpandq_avx512vl( XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpexpandq_avx512vl( XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPEXPANDQ 5-392 PAGE 2216 LINE 114035 -:VPEXPANDQ YmmReg1^KWriteMask, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x89; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPEXPANDQ YmmReg1 YmmOpMask64, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x89; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:32 = vpexpandq_avx512vl( YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpexpandq_avx512vl( YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPEXPANDQ 5-392 PAGE 2216 LINE 114037 define pcodeop vpexpandq_avx512f ; -:VPEXPANDQ ZmmReg1^KWriteMask, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x89; ZmmReg1 ... & ZmmReg2_m512 +:VPEXPANDQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x89; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - ZmmReg1 = vpexpandq_avx512f( ZmmReg2_m512 ); + ZmmResult = vpexpandq_avx512f( ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPLZCNTD/Q 5-394 PAGE 2218 LINE 114118 define pcodeop vplzcntd_avx512vl ; -:VPLZCNTD XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x44; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPLZCNTD XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x44; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vplzcntd_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vplzcntd_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPLZCNTD/Q 5-394 PAGE 2218 LINE 114122 -:VPLZCNTD YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x44; (YmmReg1 & ZmmReg1) ... 
& YmmReg2_m256_m32bcst +:VPLZCNTD YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x44; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vplzcntd_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vplzcntd_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPLZCNTD/Q 5-394 PAGE 2218 LINE 114126 define pcodeop vplzcntd_avx512cd ; -:VPLZCNTD ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x44; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPLZCNTD ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x44; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vplzcntd_avx512cd( ZmmReg2_m512_m32bcst ); + ZmmResult = vplzcntd_avx512cd( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPLZCNTD/Q 5-394 PAGE 2218 LINE 114130 define pcodeop vplzcntq_avx512vl ; -:VPLZCNTQ XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x44; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPLZCNTQ XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x44; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vplzcntq_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vplzcntq_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPLZCNTD/Q 5-394 PAGE 2218 LINE 114134 -:VPLZCNTQ YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x44; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPLZCNTQ YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x44; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vplzcntq_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vplzcntq_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPLZCNTD/Q 5-394 PAGE 2218 LINE 114138 define pcodeop vplzcntq_avx512cd ; -:VPLZCNTQ ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x44; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPLZCNTQ ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x44; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vplzcntq_avx512cd( ZmmReg2_m512_m64bcst ); + ZmmResult = vplzcntq_avx512cd( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114413 define pcodeop vpmovm2b_avx512vl ; -:VPMOVM2B XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x28; (XmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2B XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x28; (XmmReg1 & ZmmReg1) & KReg_rm { local tmp:16 = vpmovm2b_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114415 -:VPMOVM2B YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x28; (YmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2B YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x28; (YmmReg1 & ZmmReg1) & KReg_rm { local tmp:32 = vpmovm2b_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); @@ -9276,21 +11470,21 @@ define pcodeop vpmovm2b_avx512vl ; # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114417 define pcodeop vpmovm2b_avx512bw ; -:VPMOVM2B ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x28; ZmmReg1 & KReg_rm +:VPMOVM2B ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x28; ZmmReg1 & KReg_rm { ZmmReg1 = vpmovm2b_avx512bw( KReg_rm ); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114419 define pcodeop vpmovm2w_avx512vl ; -:VPMOVM2W XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x28; (XmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2W XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x28; (XmmReg1 & ZmmReg1) & KReg_rm { local tmp:16 = 
vpmovm2w_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114421 -:VPMOVM2W YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x28; (YmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2W YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x28; (YmmReg1 & ZmmReg1) & KReg_rm { local tmp:32 = vpmovm2w_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); @@ -9298,21 +11492,21 @@ define pcodeop vpmovm2w_avx512vl ; # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114423 define pcodeop vpmovm2w_avx512bw ; -:VPMOVM2W ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x28; ZmmReg1 & KReg_rm +:VPMOVM2W ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x28; ZmmReg1 & KReg_rm { ZmmReg1 = vpmovm2w_avx512bw( KReg_rm ); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114425 define pcodeop vpmovm2d_avx512vl ; -:VPMOVM2D XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x38; (XmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2D XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x38; (XmmReg1 & ZmmReg1) & KReg_rm { local tmp:16 = vpmovm2d_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114427 -:VPMOVM2D YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x38; (YmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2D YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x38; (YmmReg1 & ZmmReg1) & KReg_rm { local tmp:32 = vpmovm2d_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); @@ -9320,21 +11514,21 @@ define pcodeop vpmovm2d_avx512vl ; # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114429 define pcodeop vpmovm2d_avx512dq ; -:VPMOVM2D ZmmReg1, KReg_rm 
is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x38; ZmmReg1 & KReg_rm +:VPMOVM2D ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x38; ZmmReg1 & KReg_rm { ZmmReg1 = vpmovm2d_avx512dq( KReg_rm ); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114431 define pcodeop vpmovm2q_avx512vl ; -:VPMOVM2Q XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x38; (XmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2Q XmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x38; (XmmReg1 & ZmmReg1) & KReg_rm { local tmp:16 = vpmovm2q_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); } # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114433 -:VPMOVM2Q YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x38; (YmmReg1 & ZmmReg1) & KReg_rm +:VPMOVM2Q YmmReg1, KReg_rm is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x38; (YmmReg1 & ZmmReg1) & KReg_rm { local tmp:32 = vpmovm2q_avx512vl( KReg_rm ); ZmmReg1 = zext(tmp); @@ -9342,87 +11536,87 @@ define pcodeop vpmovm2q_avx512vl ; # VPMOVM2B/VPMOVM2W/VPMOVM2D/VPMOVM2Q 5-400 PAGE 2224 LINE 114435 define pcodeop vpmovm2q_avx512dq ; -:VPMOVM2Q ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x38; ZmmReg1 & KReg_rm +:VPMOVM2Q ZmmReg1, KReg_rm is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x38; ZmmReg1 & KReg_rm { ZmmReg1 = vpmovm2q_avx512dq( KReg_rm ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114542 define pcodeop vpmovb2m_avx512vl ; -:VPMOVB2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x29; KReg_reg & (mod=0x3 & XmmReg2) +:VPMOVB2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x29; KReg_reg & (mod=0x3 & 
XmmReg2) { KReg_reg = vpmovb2m_avx512vl( XmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114544 -:VPMOVB2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x29; KReg_reg & (mod=0x3 & YmmReg2) +:VPMOVB2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x29; KReg_reg & (mod=0x3 & YmmReg2) { KReg_reg = vpmovb2m_avx512vl( YmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114546 define pcodeop vpmovb2m_avx512bw ; -:VPMOVB2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x29; KReg_reg & (mod=0x3 & ZmmReg2) +:VPMOVB2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x29; KReg_reg & (mod=0x3 & ZmmReg2) { KReg_reg = vpmovb2m_avx512bw( ZmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114548 define pcodeop vpmovw2m_avx512vl ; -:VPMOVW2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x29; KReg_reg & (mod=0x3 & XmmReg2) +:VPMOVW2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x29; KReg_reg & (mod=0x3 & XmmReg2) { KReg_reg = vpmovw2m_avx512vl( XmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114550 -:VPMOVW2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x29; KReg_reg & (mod=0x3 & YmmReg2) +:VPMOVW2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x29; KReg_reg & (mod=0x3 & YmmReg2) { KReg_reg = vpmovw2m_avx512vl( YmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114552 define pcodeop vpmovw2m_avx512bw ; -:VPMOVW2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x29; KReg_reg & (mod=0x3 & ZmmReg2) +:VPMOVW2M KReg_reg, ZmmReg2 is 
$(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x29; KReg_reg & (mod=0x3 & ZmmReg2) { KReg_reg = vpmovw2m_avx512bw( ZmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114554 define pcodeop vpmovd2m_avx512vl ; -:VPMOVD2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x39; KReg_reg & (mod=0x3 & XmmReg2) +:VPMOVD2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x39; KReg_reg & (mod=0x3 & XmmReg2) { KReg_reg = vpmovd2m_avx512vl( XmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114556 -:VPMOVD2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x39; KReg_reg & (mod=0x3 & YmmReg2) +:VPMOVD2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x39; KReg_reg & (mod=0x3 & YmmReg2) { KReg_reg = vpmovd2m_avx512vl( YmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114558 define pcodeop vpmovd2m_avx512dq ; -:VPMOVD2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x39; KReg_reg & (mod=0x3 & ZmmReg2) +:VPMOVD2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0); byte=0x39; KReg_reg & (mod=0x3 & ZmmReg2) { KReg_reg = vpmovd2m_avx512dq( ZmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114560 define pcodeop vpmovq2m_avx512vl ; -:VPMOVQ2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x39; KReg_reg & (mod=0x3 & XmmReg2) +:VPMOVQ2M KReg_reg, XmmReg2 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x39; KReg_reg & (mod=0x3 & XmmReg2) { KReg_reg = vpmovq2m_avx512vl( XmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114562 -:VPMOVQ2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & 
$(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x39; KReg_reg & (mod=0x3 & YmmReg2) +:VPMOVQ2M KReg_reg, YmmReg2 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x39; KReg_reg & (mod=0x3 & YmmReg2) { KReg_reg = vpmovq2m_avx512vl( YmmReg2 ); } # VPMOVB2M/VPMOVW2M/VPMOVD2M/VPMOVQ2M 5-403 PAGE 2227 LINE 114564 define pcodeop vpmovq2m_avx512dq ; -:VPMOVQ2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x39; KReg_reg & (mod=0x3 & ZmmReg2) +:VPMOVQ2M KReg_reg, ZmmReg2 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1); byte=0x39; KReg_reg & (mod=0x3 & ZmmReg2) { KReg_reg = vpmovq2m_avx512dq( ZmmReg2 ); } @@ -9430,1566 +11624,1874 @@ define pcodeop vpmovq2m_avx512dq ; # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115959 define pcodeop vprolvd_avx512vl ; -:VPROLVD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPROLVD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:16 = vprolvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vprolvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115962 define pcodeop vprold_avx512vl ; -:VPROLD vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=1 ... 
& XmmReg2_m128_m32bcst; imm8 +:VPROLD vexVVVV_XmmReg XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask32; byte=0x72; reg_opcode=1 ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprold_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vprold_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask32; + vexVVVV_ZmmReg = zext(XmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115965 define pcodeop vprolvq_avx512vl ; -:VPROLVQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPROLVQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x15; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:16 = vprolvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vprolvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115968 define pcodeop vprolq_avx512vl ; -:VPROLQ vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=1 ... & XmmReg2_m128_m64bcst; imm8 +:VPROLQ vexVVVV_XmmReg XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask64; byte=0x72; reg_opcode=1 ... 
& XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprolq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vprolq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask64; + vexVVVV_ZmmReg = zext(XmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115971 -:VPROLVD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPROLVD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vprolvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vprolvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115974 -:VPROLD vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=1 ... & YmmReg2_m256_m32bcst; imm8 +:VPROLD vexVVVV_YmmReg YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask32; byte=0x72; reg_opcode=1 ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprold_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vprold_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask32; + vexVVVV_ZmmReg = zext(YmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115977 -:VPROLVQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPROLVQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x15; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vprolvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vprolvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115980 -:VPROLQ vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=1 ... & YmmReg2_m256_m64bcst; imm8 +:VPROLQ vexVVVV_YmmReg YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask64; byte=0x72; reg_opcode=1 ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprolq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vprolq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask64; + vexVVVV_ZmmReg = zext(YmmResult); } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115983 define pcodeop vprolvd_avx512f ; -:VPROLVD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x15; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPROLVD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x15; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vprolvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vprolvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115987 define pcodeop vprold_avx512f ; -:VPROLD evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=1 ... & ZmmReg2_m512_m32bcst; imm8 +:VPROLD evexV5_ZmmReg ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & ZmmOpMask32; byte=0x72; reg_opcode=1 ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - evexV5_ZmmReg = vprold_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vprold_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask32; + evexV5_ZmmReg = ZmmResult; } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115990 define pcodeop vprolvq_avx512f ; -:VPROLVQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x15; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPROLVQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x15; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vprolvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vprolvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PROLD/PROLVD/PROLQ/PROLVQ 5-430 PAGE 2254 LINE 115993 define pcodeop vprolq_avx512f ; -:VPROLQ evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=1 ... & ZmmReg2_m512_m64bcst; imm8 +:VPROLQ evexV5_ZmmReg ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & ZmmOpMask64; byte=0x72; reg_opcode=1 ... 
& ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - evexV5_ZmmReg = vprolq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vprolq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask64; + evexV5_ZmmReg = ZmmResult; } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116190 define pcodeop vprorvd_avx512vl ; -:VPRORVD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPRORVD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:16 = vprorvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vprorvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116194 define pcodeop vprord_avx512vl ; -:VPRORD vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=0 ... & XmmReg2_m128_m32bcst; imm8 +:VPRORD vexVVVV_XmmReg XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask32; byte=0x72; reg_opcode=0 ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprord_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vprord_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask32; + vexVVVV_ZmmReg = zext(XmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116197 define pcodeop vprorvq_avx512vl ; -:VPRORVQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPRORVQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x14; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:16 = vprorvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vprorvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116200 define pcodeop vprorq_avx512vl ; -:VPRORQ vexVVVV_XmmReg^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=0 ... & XmmReg2_m128_m64bcst; imm8 +:VPRORQ vexVVVV_XmmReg XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_XmmReg & vexVVVV_ZmmReg) & XmmOpMask64; byte=0x72; reg_opcode=0 ... 
& XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprorq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + XmmResult = vprorq_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = vexVVVV_XmmReg; + build XmmOpMask64; + vexVVVV_ZmmReg = zext(XmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116203 -:VPRORVD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPRORVD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vprorvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vprorvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116207 -:VPRORD vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=0 ... & YmmReg2_m256_m32bcst; imm8 +:VPRORD vexVVVV_YmmReg YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask32; byte=0x72; reg_opcode=0 ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprord_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vprord_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask32; + vexVVVV_ZmmReg = zext(YmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116210 -:VPRORVQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPRORVQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x14; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - local tmp:32 = vprorvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vprorvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116213 -:VPRORQ vexVVVV_YmmReg^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & KWriteMask; byte=0x72; reg_opcode=0 ... & YmmReg2_m256_m64bcst; imm8 +:VPRORQ vexVVVV_YmmReg YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & (vexVVVV_YmmReg & vexVVVV_ZmmReg) & YmmOpMask64; byte=0x72; reg_opcode=0 ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - local tmp:64 = vprorq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - vexVVVV_ZmmReg = zext(tmp); + YmmResult = vprorq_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = vexVVVV_YmmReg; + build YmmOpMask64; + vexVVVV_ZmmReg = zext(YmmResult); } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116216 define pcodeop vprorvd_avx512f ; -:VPRORVD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x14; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPRORVD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x14; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vprorvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vprorvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116220 define pcodeop vprord_avx512f ; -:VPRORD evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=0 ... & ZmmReg2_m512_m32bcst; imm8 +:VPRORD evexV5_ZmmReg ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg & ZmmOpMask32; byte=0x72; reg_opcode=0 ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - evexV5_ZmmReg = vprord_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vprord_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask32; + evexV5_ZmmReg = ZmmResult; } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116223 define pcodeop vprorvq_avx512f ; -:VPRORVQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x14; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPRORVQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x14; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-RVM) { - ZmmReg1 = vprorvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vprorvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # PRORD/PRORVD/PRORQ/PRORVQ 5-435 PAGE 2259 LINE 116226 define pcodeop vprorq_avx512f ; -:VPRORQ evexV5_ZmmReg^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & KWriteMask; byte=0x72; reg_opcode=0 ... & ZmmReg2_m512_m64bcst; imm8 +:VPRORQ evexV5_ZmmReg ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg & ZmmOpMask64; byte=0x72; reg_opcode=0 ... 
& ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV-VMI) { - evexV5_ZmmReg = vprorq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vprorq_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = evexV5_ZmmReg; + build ZmmOpMask64; + evexV5_ZmmReg = ZmmResult; } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116424 -# WARNING: did not recognize qualifier /vsib for "VPSCATTERDD vm32x {k1}, xmm1" define pcodeop vpscatterdd_avx512vl ; -:VPSCATTERDD m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA0; XmmReg1 ... & m32 +:VPSCATTERDD x_vm32x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xA0; XmmReg1 ... & x_vm32x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterdd_avx512vl( m32, XmmReg1 ); + vpscatterdd_avx512vl( x_vm32x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116426 -# WARNING: did not recognize qualifier /vsib for "VPSCATTERDD vm32y {k1}, ymm1" -:VPSCATTERDD m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA0; YmmReg1 ... & m32 +:VPSCATTERDD y_vm32y YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask; byte=0xA0; YmmReg1 ... 
& y_vm32y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterdd_avx512vl( m32, YmmReg1 ); + vpscatterdd_avx512vl( y_vm32y, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116428 -# WARNING: did not recognize qualifier /vsib for "VPSCATTERDD vm32z {k1}, zmm1" define pcodeop vpscatterdd_avx512f ; -:VPSCATTERDD m32^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA0; ZmmReg1 ... & m32 +:VPSCATTERDD z_vm32z ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0xA0; ZmmReg1 ... & z_vm32z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterdd_avx512f( m32, ZmmReg1 ); + vpscatterdd_avx512f( z_vm32z, ZmmOpMask, ZmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116430 -# WARNING: did not recognize qualifier /vsib for "VPSCATTERDQ vm32x {k1}, xmm1" define pcodeop vpscatterdq_avx512vl ; -:VPSCATTERDQ m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA0; XmmReg1 ... & m32 +:VPSCATTERDQ x_vm32x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xA0; XmmReg1 ... & x_vm32x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterdq_avx512vl( m32, XmmReg1 ); + vpscatterdq_avx512vl( x_vm32x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116432 -# WARNING: did not recognize qualifier /vsib for "VPSCATTERDQ vm32x {k1}, ymm1" -:VPSCATTERDQ m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA0; YmmReg1 ... 
& m32 +:VPSCATTERDQ x_vm32x YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0xA0; YmmReg1 ... & x_vm32x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterdq_avx512vl( m32, YmmReg1 ); + vpscatterdq_avx512vl( x_vm32x, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116434 -# WARNING: did not recognize qualifier /vsib for "VPSCATTERDQ vm32y {k1}, zmm1" define pcodeop vpscatterdq_avx512f ; -:VPSCATTERDQ m32^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA0; ZmmReg1 ... & m32 +:VPSCATTERDQ y_vm32y ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0xA0; ZmmReg1 ... & y_vm32y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterdq_avx512f( m32, ZmmReg1 ); + vpscatterdq_avx512f( y_vm32y, ZmmOpMask, ZmmReg1 ); # TODO missing destination or side effects } +@ifdef IA64 +#technically these should be supported in 32-bit mode, but the assembly differences are notable, and we don't handle vm64 in 32-bit # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116436 # WARNING: did not recognize qualifier /vsib for "VPSCATTERQD vm64x {k1}, xmm1" define pcodeop vpscatterqd_avx512vl ; -:VPSCATTERQD m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA1; XmmReg1 ... & m64 +:VPSCATTERQD q_vm64x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xA1; XmmReg1 ...
& q_vm64x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterqd_avx512vl( m64, XmmReg1 ); + vpscatterqd_avx512vl( q_vm64x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116438 # WARNING: did not recognize qualifier /vsib for "VPSCATTERQD vm64y {k1}, xmm1" -:VPSCATTERQD m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA1; XmmReg1 ... & m64 +:VPSCATTERQD q_vm64y XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xA1; XmmReg1 ... & q_vm64y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterqd_avx512vl( m64, XmmReg1 ); + vpscatterqd_avx512vl( q_vm64y, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116440 # WARNING: did not recognize qualifier /vsib for "VPSCATTERQD vm64z {k1}, ymm1" define pcodeop vpscatterqd_avx512f ; -:VPSCATTERQD m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA1; YmmReg1 ... & m64 +:VPSCATTERQD q_vm64z YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask; byte=0xA1; YmmReg1 ... & q_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterqd_avx512f( m64, YmmReg1 ); + vpscatterqd_avx512f( q_vm64z, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116442 # WARNING: did not recognize qualifier /vsib for "VPSCATTERQQ vm64x {k1}, xmm1" define pcodeop vpscatterqq_avx512vl ; -:VPSCATTERQQ m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA1; XmmReg1 ...
& m64 +:VPSCATTERQQ x_vm64x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xA1; XmmReg1 ... & x_vm64x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterqq_avx512vl( m64, XmmReg1 ); + vpscatterqq_avx512vl( x_vm64x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116444 # WARNING: did not recognize qualifier /vsib for "VPSCATTERQQ vm64y {k1}, ymm1" -:VPSCATTERQQ m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA1; YmmReg1 ... & m64 +:VPSCATTERQQ y_vm64y YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0xA1; YmmReg1 ... & y_vm64y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterqq_avx512vl( m64, YmmReg1 ); + vpscatterqq_avx512vl( y_vm64y, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VPSCATTERDD/VPSCATTERDQ/VPSCATTERQD/VPSCATTERQQ 5-440 PAGE 2264 LINE 116446 # WARNING: did not recognize qualifier /vsib for "VPSCATTERQQ vm64z {k1}, zmm1" define pcodeop vpscatterqq_avx512f ; -:VPSCATTERQQ m64^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA1; ZmmReg1 ... & m64 +:VPSCATTERQQ z_vm64z ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0xA1; ZmmReg1 ... 
& z_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vpscatterqq_avx512f( m64, ZmmReg1 ); + vpscatterqq_avx512f( z_vm64z, ZmmOpMask, ZmmReg1 ); # TODO missing destination or side effects } +@endif # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116632 define pcodeop vpsllvw_avx512vl ; -:VPSLLVW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSLLVW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x12; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsllvw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsllvw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116635 -:VPSLLVW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x12; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSLLVW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x12; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsllvw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsllvw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116638 define pcodeop vpsllvw_avx512bw ; -:VPSLLVW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x12; ZmmReg1 ... & ZmmReg2_m512 +:VPSLLVW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x12; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsllvw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsllvw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116641 define pcodeop vpsllvd_avx512vl ; -:VPSLLVD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x47; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPSLLVD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x47; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsllvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsllvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116644 -:VPSLLVD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x47; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPSLLVD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x47; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsllvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsllvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116647 define pcodeop vpsllvd_avx512f ; -:VPSLLVD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x47; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPSLLVD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x47; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsllvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpsllvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116650 define pcodeop vpsllvq_avx512vl ; -:VPSLLVQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x47; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPSLLVQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x47; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsllvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsllvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116653 -:VPSLLVQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x47; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPSLLVQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x47; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsllvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsllvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPSLLVW/VPSLLVD/VPSLLVQ 5-445 PAGE 2269 LINE 116656 define pcodeop vpsllvq_avx512f ; -:VPSLLVQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x47; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPSLLVQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x47; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsllvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpsllvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116880 define pcodeop vpsravw_avx512vl ; -:VPSRAVW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x11; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRAVW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x11; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsravw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsravw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116883 -:VPSRAVW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x11; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSRAVW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x11; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsravw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsravw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116886 define pcodeop vpsravw_avx512bw ; -:VPSRAVW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x11; ZmmReg1 ... & ZmmReg2_m512 +:VPSRAVW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x11; (ZmmReg1 & ZmmOpMask16) ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsravw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsravw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116889 define pcodeop vpsravd_avx512vl ; -:VPSRAVD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x46; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPSRAVD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x46; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsravd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsravd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116893 -:VPSRAVD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x46; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPSRAVD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x46; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsravd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsravd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116897 define pcodeop vpsravd_avx512f ; -:VPSRAVD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x46; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPSRAVD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x46; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsravd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpsravd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116901 define pcodeop vpsravq_avx512vl ; -:VPSRAVQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x46; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPSRAVQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x46; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsravq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsravq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116905 -:VPSRAVQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x46; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPSRAVQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x46; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsravq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsravq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPSRAVW/VPSRAVD/VPSRAVQ 5-450 PAGE 2274 LINE 116910 define pcodeop vpsravq_avx512f ; -:VPSRAVQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x46; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPSRAVQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x46; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsravq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpsravq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117151 define pcodeop vpsrlvw_avx512vl ; -:VPSRLVW XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x10; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128 +:VPSRLVW XmmReg1 XmmOpMask16, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x10; (XmmReg1 & ZmmReg1 & XmmOpMask16) ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:16 = vpsrlvw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrlvw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + XmmMask = XmmReg1; + build XmmOpMask16; + ZmmReg1 = zext(XmmResult); } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117154 -:VPSRLVW YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x10; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256 +:VPSRLVW YmmReg1 YmmOpMask16, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x10; (YmmReg1 & ZmmReg1 & YmmOpMask16) ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - local tmp:32 = vpsrlvw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrlvw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + YmmMask = YmmReg1; + build YmmOpMask16; + ZmmReg1 = zext(YmmResult); } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117157 define pcodeop vpsrlvw_avx512bw ; -:VPSRLVW ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x10; ZmmReg1 ... & ZmmReg2_m512 +:VPSRLVW ZmmReg1 ZmmOpMask16, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x10; (ZmmReg1 & ZmmOpMask16) ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - ZmmReg1 = vpsrlvw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmResult = vpsrlvw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + ZmmMask = ZmmReg1; + build ZmmOpMask16; + ZmmReg1 = ZmmResult; } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117160 define pcodeop vpsrlvd_avx512vl ; -:VPSRLVD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x45; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VPSRLVD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x45; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsrlvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrlvd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117163 -:VPSRLVD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x45; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VPSRLVD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x45; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsrlvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrlvd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117166 define pcodeop vpsrlvd_avx512f ; -:VPSRLVD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x45; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VPSRLVD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x45; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsrlvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vpsrlvd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117169 define pcodeop vpsrlvq_avx512vl ; -:VPSRLVQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x45; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VPSRLVQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x45; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpsrlvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vpsrlvq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117172 -:VPSRLVQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x45; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VPSRLVQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x45; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpsrlvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vpsrlvq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPSRLVW/VPSRLVD/VPSRLVQ 5-455 PAGE 2279 LINE 117175 define pcodeop vpsrlvq_avx512f ; -:VPSRLVQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x45; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VPSRLVQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x45; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpsrlvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vpsrlvq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPTERNLOGD/VPTERNLOGQ 5-460 PAGE 2284 LINE 117395 define pcodeop vpternlogd_avx512vl ; -:VPTERNLOGD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x25; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VPTERNLOGD XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x25; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpternlogd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpternlogd_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VPTERNLOGD/VPTERNLOGQ 5-460 PAGE 2284 LINE 117400 -:VPTERNLOGD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x25; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VPTERNLOGD YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x25; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpternlogd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpternlogd_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VPTERNLOGD/VPTERNLOGQ 5-460 PAGE 2284 LINE 117405 define pcodeop vpternlogd_avx512f ; -:VPTERNLOGD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x25; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VPTERNLOGD ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x25; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpternlogd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vpternlogd_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VPTERNLOGD/VPTERNLOGQ 5-460 PAGE 2284 LINE 117410 define pcodeop vpternlogq_avx512vl ; -:VPTERNLOGQ XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x25; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VPTERNLOGQ XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x25; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vpternlogq_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vpternlogq_avx512vl( XmmReg1, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VPTERNLOGD/VPTERNLOGQ 5-460 PAGE 2284 LINE 117415 -:VPTERNLOGQ YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x25; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VPTERNLOGQ YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x25; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vpternlogq_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vpternlogq_avx512vl( YmmReg1, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VPTERNLOGD/VPTERNLOGQ 5-460 PAGE 2284 LINE 117420 define pcodeop vpternlogq_avx512f ; -:VPTERNLOGQ ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x25; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VPTERNLOGQ ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x25; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vpternlogq_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vpternlogq_avx512f( ZmmReg1, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117559 define pcodeop vptestmb_avx512vl ; -:VPTESTMB KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... & XmmReg2_m128 +:VPTESTMB KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... 
& XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestmb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vptestmb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117562 -:VPTESTMB KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 +:VPTESTMB KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestmb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vptestmb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117565 define pcodeop vptestmb_avx512bw ; -:VPTESTMB KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... & ZmmReg2_m512 +:VPTESTMB KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestmb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vptestmb_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,64]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117568 define pcodeop vptestmw_avx512vl ; -:VPTESTMW KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... 
& XmmReg2_m128 +:VPTESTMW KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestmw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vptestmw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117571 -:VPTESTMW KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 +:VPTESTMW KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestmw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vptestmw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117574 define pcodeop vptestmw_avx512bw ; -:VPTESTMW KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... & ZmmReg2_m512 +:VPTESTMW KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... 
& ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestmw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vptestmw_avx512bw( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117577 define pcodeop vptestmd_avx512vl ; -:VPTESTMD KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... & XmmReg2_m128_m32bcst +:VPTESTMD KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestmd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + local tmp = vptestmd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117581 -:VPTESTMD KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... & YmmReg2_m256_m32bcst +:VPTESTMD KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestmd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + local tmp = vptestmd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117585 define pcodeop vptestmd_avx512f ; -:VPTESTMD KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... & ZmmReg2_m512_m32bcst +:VPTESTMD KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestmd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + local tmp = vptestmd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117589 define pcodeop vptestmq_avx512vl ; -:VPTESTMQ KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... & XmmReg2_m128_m64bcst +:VPTESTMQ KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestmq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + local tmp = vptestmq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117593 -:VPTESTMQ KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... & YmmReg2_m256_m64bcst +:VPTESTMQ KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestmq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + local tmp = vptestmq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPTESTMB/VPTESTMW/VPTESTMD/VPTESTMQ 5-463 PAGE 2287 LINE 117597 define pcodeop vptestmq_avx512f ; -:VPTESTMQ KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... & ZmmReg2_m512_m64bcst +:VPTESTMQ KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestmq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + local tmp = vptestmq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117717 define pcodeop vptestnmb_avx512vl ; -:VPTESTNMB KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... & XmmReg2_m128 +:VPTESTNMB KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestnmb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vptestnmb_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117721 -:VPTESTNMB KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 +:VPTESTNMB KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestnmb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vptestnmb_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117725 define pcodeop vptestnmb_avx512f ; -:VPTESTNMB KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... 
& ZmmReg2_m512 +:VPTESTNMB KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestnmb_avx512f( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vptestnmb_avx512f( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,64]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117729 define pcodeop vptestnmw_avx512vl ; -:VPTESTNMW KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... & XmmReg2_m128 +:VPTESTNMW KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x26; KReg_reg ... & XmmReg2_m128 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestnmw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + local tmp = vptestnmw_avx512vl( vexVVVV_XmmReg, XmmReg2_m128 ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117733 -:VPTESTNMW KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... & YmmReg2_m256 +:VPTESTNMW KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x26; KReg_reg ... 
& YmmReg2_m256 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestnmw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + local tmp = vptestnmw_avx512vl( vexVVVV_YmmReg, YmmReg2_m256 ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117737 define pcodeop vptestnmw_avx512f ; -:VPTESTNMW KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... & ZmmReg2_m512 +:VPTESTNMW KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x26; KReg_reg ... & ZmmReg2_m512 [ evexD8Type = 1; evexTType = 0; ] # (TupleType FVM) { - KReg_reg = vptestnmw_avx512f( evexV5_ZmmReg, ZmmReg2_m512 ); + local tmp = vptestnmw_avx512f( evexV5_ZmmReg, ZmmReg2_m512 ); + KReg_reg = zext(AVXOpMask[0,32]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117741 define pcodeop vptestnmd_avx512vl ; -:VPTESTNMD KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... & XmmReg2_m128_m32bcst +:VPTESTNMD KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestnmd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + local tmp = vptestnmd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117745 -:VPTESTNMD KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... 
& YmmReg2_m256_m32bcst +:VPTESTNMD KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestnmd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + local tmp = vptestnmd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117749 define pcodeop vptestnmd_avx512f ; -:VPTESTNMD KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... & ZmmReg2_m512_m32bcst +:VPTESTNMD KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & AVXOpMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestnmd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + local tmp = vptestnmd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + KReg_reg = zext(AVXOpMask[0,16]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117753 define pcodeop vptestnmq_avx512vl ; -:VPTESTNMQ KReg_reg^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... & XmmReg2_m128_m64bcst +:VPTESTNMQ KReg_reg AVXOpMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_XmmReg; byte=0x27; KReg_reg ... 
& XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestnmq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + local tmp = vptestnmq_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + KReg_reg = zext(AVXOpMask[0,2]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117757 -:VPTESTNMQ KReg_reg^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... & YmmReg2_m256_m64bcst +:VPTESTNMQ KReg_reg AVXOpMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & vexVVVV_YmmReg; byte=0x27; KReg_reg ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestnmq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + local tmp = vptestnmq_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + KReg_reg = zext(AVXOpMask[0,4]) & tmp; } # VPTESTNMB/W/D/Q 5-466 PAGE 2290 LINE 117761 define pcodeop vptestnmq_avx512f ; -:VPTESTNMQ KReg_reg^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... & ZmmReg2_m512_m64bcst +:VPTESTNMQ KReg_reg AVXOpMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W1) & AVXOpMask & evexV5_ZmmReg; byte=0x27; KReg_reg ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - KReg_reg = vptestnmq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + local tmp = vptestnmq_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + KReg_reg = zext(AVXOpMask[0,8]) & tmp; } # VRANGEPD 5-470 PAGE 2294 LINE 117905 define pcodeop vrangepd_avx512vl ; -:VRANGEPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x50; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VRANGEPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x50; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrangepd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vrangepd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRANGEPD 5-470 PAGE 2294 LINE 117910 -:VRANGEPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x50; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VRANGEPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x50; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrangepd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vrangepd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VRANGEPD 5-470 PAGE 2294 LINE 117915 define pcodeop vrangepd_avx512dq ; -:VRANGEPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x50; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VRANGEPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x50; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrangepd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vrangepd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VRANGEPS 5-475 PAGE 2299 LINE 118139 define pcodeop vrangeps_avx512vl ; -:VRANGEPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x50; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VRANGEPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x50; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrangeps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vrangeps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRANGEPS 5-475 PAGE 2299 LINE 118144 -:VRANGEPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x50; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VRANGEPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x50; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrangeps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vrangeps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VRANGEPS 5-475 PAGE 2299 LINE 118149 define pcodeop vrangeps_avx512dq ; -:VRANGEPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x50; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VRANGEPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x50; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrangeps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vrangeps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VRANGESD 5-479 PAGE 2303 LINE 118318 define pcodeop vrangesd_avx512dq ; -:VRANGESD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64; imm8 +:VRANGESD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrangesd_avx512dq( vexVVVV_XmmReg, XmmReg2_m64, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vrangesd_avx512dq( vexVVVV_XmmReg, XmmReg2_m64, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRANGESS 5-482 PAGE 2306 LINE 118473 define pcodeop vrangess_avx512dq ; -:VRANGESS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VRANGESS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x51; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrangess_avx512dq( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vrangess_avx512dq( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRCP14PD 5-485 PAGE 2309 LINE 118626 define pcodeop vrcp14pd_avx512vl ; -:VRCP14PD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x4C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VRCP14PD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x4C; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrcp14pd_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vrcp14pd_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRCP14PD 5-485 PAGE 2309 LINE 118629 -:VRCP14PD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x4C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VRCP14PD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x4C; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrcp14pd_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vrcp14pd_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VRCP14PD 5-485 PAGE 2309 LINE 118632 define pcodeop vrcp14pd_avx512f ; -:VRCP14PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x4C; ZmmReg1 ... 
& ZmmReg2_m512_m64bcst +:VRCP14PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x4C; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrcp14pd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmResult = vrcp14pd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VRCP14SD 5-487 PAGE 2311 LINE 118726 define pcodeop vrcp14sd_avx512f ; -:VRCP14SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x4D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VRCP14SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x4D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrcp14sd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vrcp14sd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRCP14PS 5-489 PAGE 2313 LINE 118800 define pcodeop vrcp14ps_avx512vl ; -:VRCP14PS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x4C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VRCP14PS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x4C; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrcp14ps_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vrcp14ps_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRCP14PS 5-489 PAGE 2313 LINE 118803 -:VRCP14PS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x4C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VRCP14PS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x4C; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrcp14ps_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vrcp14ps_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VRCP14PS 5-489 PAGE 2313 LINE 118806 define pcodeop vrcp14ps_avx512f ; -:VRCP14PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x4C; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VRCP14PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x4C; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrcp14ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vrcp14ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VRCP14SS 5-491 PAGE 2315 LINE 118904 define pcodeop vrcp14ss_avx512f ; -:VRCP14SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x4D; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m32 +:VRCP14SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x4D; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrcp14ss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vrcp14ss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRCP28PD 5-493 PAGE 2317 LINE 118979 define pcodeop vrcp28pd_avx512er ; -:VRCP28PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xCA; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VRCP28PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0xCA; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrcp28pd_avx512er( ZmmReg2_m512_m64bcst ); + ZmmResult = vrcp28pd_avx512er( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VRCP28SD 5-495 PAGE 2319 LINE 119074 define pcodeop vrcp28sd_avx512er ; -:VRCP28SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xCB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VRCP28SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xCB; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrcp28sd_avx512er( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vrcp28sd_avx512er( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRCP28PS 5-497 PAGE 2321 LINE 119167 define pcodeop vrcp28ps_avx512er ; -:VRCP28PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xCA; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VRCP28PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0xCA; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrcp28ps_avx512er( ZmmReg2_m512_m32bcst ); + ZmmResult = vrcp28ps_avx512er( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VRCP28SS 5-499 PAGE 2323 LINE 119263 define pcodeop vrcp28ss_avx512er ; -:VRCP28SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xCB; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VRCP28SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xCB; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrcp28ss_avx512er( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vrcp28ss_avx512er( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VREDUCEPD 5-501 PAGE 2325 LINE 119356 define pcodeop vreducepd_avx512vl ; -:VREDUCEPD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x56; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VREDUCEPD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x56; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vreducepd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vreducepd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VREDUCEPD 5-501 PAGE 2325 LINE 119360 -:VREDUCEPD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x56; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VREDUCEPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x56; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vreducepd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vreducepd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VREDUCEPD 5-501 PAGE 2325 LINE 119364 define pcodeop vreducepd_avx512dq ; -:VREDUCEPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x56; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VREDUCEPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x56; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vreducepd_avx512dq( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vreducepd_avx512dq( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VREDUCESD 5-504 PAGE 2328 LINE 119510 define pcodeop vreducesd_avx512dq ; -:VREDUCESD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VREDUCESD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vreducesd_avx512dq( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vreducesd_avx512dq( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VREDUCEPS 5-506 PAGE 2330 LINE 119605 define pcodeop vreduceps_avx512vl ; -:VREDUCEPS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x56; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VREDUCEPS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x56; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vreduceps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vreduceps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VREDUCEPS 5-506 PAGE 2330 LINE 119609 -:VREDUCEPS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x56; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VREDUCEPS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x56; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vreduceps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vreduceps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VREDUCEPS 5-506 PAGE 2330 LINE 119613 define pcodeop vreduceps_avx512dq ; -:VREDUCEPS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x56; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VREDUCEPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x56; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vreduceps_avx512dq( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vreduceps_avx512dq( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VREDUCESS 5-508 PAGE 2332 LINE 119719 define pcodeop vreducess_avx512dq ; -:VREDUCESS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VREDUCESS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vreducess_avx512dq( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vreducess_avx512dq( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRNDSCALEPD 5-510 PAGE 2334 LINE 119814 define pcodeop vrndscalepd_avx512vl ; -:VRNDSCALEPD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x09; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst; imm8 +:VRNDSCALEPD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x09; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrndscalepd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vrndscalepd_avx512vl( XmmReg2_m128_m64bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRNDSCALEPD 5-510 PAGE 2334 LINE 119818 -:VRNDSCALEPD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x09; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst; imm8 +:VRNDSCALEPD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x09; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrndscalepd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vrndscalepd_avx512vl( YmmReg2_m256_m64bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VRNDSCALEPD 5-510 PAGE 2334 LINE 119822 define pcodeop vrndscalepd_avx512f ; -:VRNDSCALEPD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask; byte=0x09; ZmmReg1 ... & ZmmReg2_m512_m64bcst; imm8 +:VRNDSCALEPD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) ; byte=0x09; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrndscalepd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmResult = vrndscalepd_avx512f( ZmmReg2_m512_m64bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VRNDSCALESD 5-514 PAGE 2338 LINE 119998 define pcodeop vrndscalesd_avx512f ; -:VRNDSCALESD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x0B; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64; imm8 +:VRNDSCALESD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64, imm8 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x0B; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64; imm8 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrndscalesd_avx512f( vexVVVV_XmmReg, XmmReg2_m64, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vrndscalesd_avx512f( vexVVVV_XmmReg, XmmReg2_m64, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRNDSCALEPS 5-516 PAGE 2340 LINE 120116 define pcodeop vrndscaleps_avx512vl ; -:VRNDSCALEPS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x08; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst; imm8 +:VRNDSCALEPS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x08; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrndscaleps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + XmmResult = vrndscaleps_avx512vl( XmmReg2_m128_m32bcst, imm8:1 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRNDSCALEPS 5-516 PAGE 2340 LINE 120120 -:VRNDSCALEPS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x08; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst; imm8 +:VRNDSCALEPS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x08; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrndscaleps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); - ZmmReg1 = zext(tmp); + YmmResult = vrndscaleps_avx512vl( YmmReg2_m256_m32bcst, imm8:1 ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VRNDSCALEPS 5-516 PAGE 2340 LINE 120124 define pcodeop vrndscaleps_avx512f ; -:VRNDSCALEPS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x08; ZmmReg1 ... & ZmmReg2_m512_m32bcst; imm8 +:VRNDSCALEPS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x08; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst; imm8 [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrndscaleps_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmResult = vrndscaleps_avx512f( ZmmReg2_m512_m32bcst, imm8:1 ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VRNDSCALESS 5-519 PAGE 2343 LINE 120263 define pcodeop vrndscaless_avx512f ; -:VRNDSCALESS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x0A; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VRNDSCALESS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x0A; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrndscaless_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vrndscaless_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRSQRT14PD 5-521 PAGE 2345 LINE 120381 define pcodeop vrsqrt14pd_avx512vl ; -:VRSQRT14PD XmmReg1^KWriteMask, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x4E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VRSQRT14PD XmmReg1 XmmOpMask64, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x4E; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrsqrt14pd_avx512vl( XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vrsqrt14pd_avx512vl( XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRSQRT14PD 5-521 PAGE 2345 LINE 120385 -:VRSQRT14PD YmmReg1^KWriteMask, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x4E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VRSQRT14PD YmmReg1 YmmOpMask64, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x4E; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrsqrt14pd_avx512vl( YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vrsqrt14pd_avx512vl( YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VRSQRT14PD 5-521 PAGE 2345 LINE 120389 define pcodeop vrsqrt14pd_avx512f ; -:VRSQRT14PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0x4E; ZmmReg1 ... 
& ZmmReg2_m512_m64bcst +:VRSQRT14PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0x4E; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrsqrt14pd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmResult = vrsqrt14pd_avx512f( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VRSQRT14SD 5-523 PAGE 2347 LINE 120491 define pcodeop vrsqrt14sd_avx512f ; -:VRSQRT14SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x4F; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VRSQRT14SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x4F; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrsqrt14sd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vrsqrt14sd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRSQRT14PS 5-525 PAGE 2349 LINE 120578 define pcodeop vrsqrt14ps_avx512vl ; -:VRSQRT14PS XmmReg1^KWriteMask, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x4E; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VRSQRT14PS XmmReg1 XmmOpMask32, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x4E; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vrsqrt14ps_avx512vl( XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vrsqrt14ps_avx512vl( XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRSQRT14PS 5-525 PAGE 2349 LINE 120582 -:VRSQRT14PS YmmReg1^KWriteMask, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x4E; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VRSQRT14PS YmmReg1 YmmOpMask32, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x4E; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vrsqrt14ps_avx512vl( YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vrsqrt14ps_avx512vl( YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VRSQRT14PS 5-525 PAGE 2349 LINE 120586 define pcodeop vrsqrt14ps_avx512f ; -:VRSQRT14PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x4E; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VRSQRT14PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0x4E; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrsqrt14ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmResult = vrsqrt14ps_avx512f( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VRSQRT14SS 5-527 PAGE 2351 LINE 120690 define pcodeop vrsqrt14ss_avx512f ; -:VRSQRT14SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x4F; (XmmReg1 & ZmmReg1) ... 
& XmmReg2_m32 +:VRSQRT14SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x4F; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrsqrt14ss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vrsqrt14ss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VRSQRT28PD 5-529 PAGE 2353 LINE 120778 define pcodeop vrsqrt28pd_avx512er ; -:VRSQRT28PD ZmmReg1^KWriteMask, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xCC; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VRSQRT28PD ZmmReg1 ZmmOpMask64, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) ; byte=0xCC; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrsqrt28pd_avx512er( ZmmReg2_m512_m64bcst ); + ZmmResult = vrsqrt28pd_avx512er( ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VRSQRT28SD 5-531 PAGE 2355 LINE 120869 define pcodeop vrsqrt28sd_avx512er ; -:VRSQRT28SD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0xCD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VRSQRT28SD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0xCD; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrsqrt28sd_avx512er( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vrsqrt28sd_avx512er( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VRSQRT28PS 5-533 PAGE 2357 LINE 120959 define pcodeop vrsqrt28ps_avx512er ; -:VRSQRT28PS ZmmReg1^KWriteMask, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xCC; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VRSQRT28PS ZmmReg1 ZmmOpMask32, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0); byte=0xCC; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vrsqrt28ps_avx512er( ZmmReg2_m512_m32bcst ); + ZmmResult = vrsqrt28ps_avx512er( ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VRSQRT28SS 5-535 PAGE 2359 LINE 121051 define pcodeop vrsqrt28ss_avx512er ; -:VRSQRT28SS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0xCD; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VRSQRT28SS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0xCD; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vrsqrt28ss_avx512er( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vrsqrt28ss_avx512er( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VSCALEFPD 5-537 PAGE 2361 LINE 121140 define pcodeop vscalefpd_avx512vl ; -:VSCALEFPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x2C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VSCALEFPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x2C; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vscalefpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vscalefpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VSCALEFPD 5-537 PAGE 2361 LINE 121143 -:VSCALEFPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x2C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VSCALEFPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x2C; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vscalefpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vscalefpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VSCALEFPD 5-537 PAGE 2361 LINE 121146 define pcodeop vscalefpd_avx512f ; -:VSCALEFPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x2C; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VSCALEFPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & evexV5_ZmmReg; byte=0x2C; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vscalefpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vscalefpd_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VSCALEFSD 5-540 PAGE 2364 LINE 121269 define pcodeop vscalefsd_avx512f ; -:VSCALEFSD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x2D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m64 +:VSCALEFSD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m64 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x2D; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... 
& XmmReg2_m64 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vscalefsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); - ZmmReg1 = zext(tmp); + XmmResult = vscalefsd_avx512f( vexVVVV_XmmReg, XmmReg2_m64 ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # VSCALEFPS 5-542 PAGE 2366 LINE 121355 define pcodeop vscalefps_avx512vl ; -:VSCALEFPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x2C; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VSCALEFPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2C; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... & XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vscalefps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vscalefps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VSCALEFPS 5-542 PAGE 2366 LINE 121358 -:VSCALEFPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x2C; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VSCALEFPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x2C; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... 
& YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vscalefps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vscalefps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VSCALEFPS 5-542 PAGE 2366 LINE 121361 define pcodeop vscalefps_avx512f ; -:VSCALEFPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x2C; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VSCALEFPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & evexV5_ZmmReg; byte=0x2C; (ZmmReg1 & ZmmOpMask32) ... & ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vscalefps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vscalefps_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VSCALEFSS 5-544 PAGE 2368 LINE 121470 define pcodeop vscalefss_avx512f ; -:VSCALEFSS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x2D; (XmmReg1 & ZmmReg1) ... & XmmReg2_m32 +:VSCALEFSS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m32 is $(EVEX_NONE) & $(VEX_LIG) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x2D; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m32 [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - local tmp:16 = vscalefss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); - ZmmReg1 = zext(tmp); + XmmResult = vscalefss_avx512f( vexVVVV_XmmReg, XmmReg2_m32 ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121559 -# WARNING: did not recognize qualifier /vsib for "VSCATTERDPS vm32x {k1}, xmm1" define pcodeop vscatterdps_avx512vl ; -:VSCATTERDPS m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA2; XmmReg1 ... & m32 +:VSCATTERDPS x_vm32x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xA2; XmmReg1 ... & x_vm32x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterdps_avx512vl( m32, XmmReg1 ); + vscatterdps_avx512vl( x_vm32x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121561 -# WARNING: did not recognize qualifier /vsib for "VSCATTERDPS vm32y {k1}, ymm1" -:VSCATTERDPS m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA2; YmmReg1 ... & m32 +:VSCATTERDPS y_vm32y YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask; byte=0xA2; YmmReg1 ... 
& y_vm32y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterdps_avx512vl( m32, YmmReg1 ); + vscatterdps_avx512vl( y_vm32y, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121563 -# WARNING: did not recognize qualifier /vsib for "VSCATTERDPS vm32z {k1}, zmm1" define pcodeop vscatterdps_avx512f ; -:VSCATTERDPS m32^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA2; ZmmReg1 ... & m32 +:VSCATTERDPS z_vm32z ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0xA2; ZmmReg1 ... & z_vm32z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterdps_avx512f( m32, ZmmReg1 ); + vscatterdps_avx512f( z_vm32z, ZmmOpMask, ZmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121565 -# WARNING: did not recognize qualifier /vsib for "VSCATTERDPD vm32x {k1}, xmm1" define pcodeop vscatterdpd_avx512vl ; -:VSCATTERDPD m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA2; XmmReg1 ... & m32 +:VSCATTERDPD x_vm32x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xA2; XmmReg1 ... & x_vm32x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterdpd_avx512vl( m32, XmmReg1 ); + vscatterdpd_avx512vl( x_vm32x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121567 -# WARNING: did not recognize qualifier /vsib for "VSCATTERDPD vm32x {k1}, ymm1" -:VSCATTERDPD m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA2; YmmReg1 ... 
& m32 +:VSCATTERDPD y_vm32y YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0xA2; YmmReg1 ... & y_vm32y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterdpd_avx512vl( m32, YmmReg1 ); + vscatterdpd_avx512vl( y_vm32y, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121569 -# WARNING: did not recognize qualifier /vsib for "VSCATTERDPD vm32y {k1}, zmm1" define pcodeop vscatterdpd_avx512f ; -:VSCATTERDPD m32^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA2; ZmmReg1 ... & m32 +:VSCATTERDPD z_vm32z ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0xA2; ZmmReg1 ... & z_vm32z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterdpd_avx512f( m32, ZmmReg1 ); + vscatterdpd_avx512f( z_vm32z, ZmmOpMask, ZmmReg1 ); # TODO missing destination or side effects } +@ifdef IA64 # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121571 -# WARNING: did not recognize qualifier /vsib for "VSCATTERQPS vm64x {k1}, xmm1" define pcodeop vscatterqps_avx512vl ; -:VSCATTERQPS m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA3; XmmReg1 ... & m64 +:VSCATTERQPS q_vm64x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xA3; XmmReg1 ... 
& q_vm64x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterqps_avx512vl( m64, XmmReg1 ); + vscatterqps_avx512vl( q_vm64x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121573 -# WARNING: did not recognize qualifier /vsib for "VSCATTERQPS vm64y {k1}, xmm1" -:VSCATTERQPS m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA3; XmmReg1 ... & m64 +:VSCATTERQPS q_vm64y XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask; byte=0xA3; XmmReg1 ... & q_vm64y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterqps_avx512vl( m64, XmmReg1 ); + vscatterqps_avx512vl( q_vm64y, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121575 -# WARNING: did not recognize qualifier /vsib for "VSCATTERQPS vm64z {k1}, ymm1" define pcodeop vscatterqps_avx512f ; -:VSCATTERQPS m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xA3; YmmReg1 ... & m64 +:VSCATTERQPS q_vm64z YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask; byte=0xA3; YmmReg1 ... & q_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterqps_avx512f( m64, YmmReg1 ); + vscatterqps_avx512f( q_vm64z, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121577 -# WARNING: did not recognize qualifier /vsib for "VSCATTERQPD vm64x {k1}, xmm1" define pcodeop vscatterqpd_avx512vl ; -:VSCATTERQPD m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA3; XmmReg1 ... 
& m64 +:VSCATTERQPD x_vm64x XmmOpMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & XmmOpMask; byte=0xA3; XmmReg1 ... & x_vm64x [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterqpd_avx512vl( m64, XmmReg1 ); + vscatterqpd_avx512vl( x_vm64x, XmmOpMask, XmmReg1 ); # TODO missing destination or side effects } # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121579 -# WARNING: did not recognize qualifier /vsib for "VSCATTERQPD vm64y {k1}, ymm1" -:VSCATTERQPD m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA3; YmmReg1 ... & m64 +:VSCATTERQPD y_vm64y YmmOpMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0xA3; YmmReg1 ... & y_vm64y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterqpd_avx512vl( m64, YmmReg1 ); + vscatterqpd_avx512vl( y_vm64y, YmmOpMask, YmmReg1 ); # TODO missing destination or side effects } +@endif +@ifdef IA64 # VSCATTERDPS/VSCATTERDPD/VSCATTERQPS/VSCATTERQPD 5-546 PAGE 2370 LINE 121581 -# WARNING: did not recognize qualifier /vsib for "VSCATTERQPD vm64z {k1}, zmm1" define pcodeop vscatterqpd_avx512f ; -:VSCATTERQPD m64^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xA3; ZmmReg1 ... & m64 +:VSCATTERQPD z_vm64z ZmmOpMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0xA3; ZmmReg1 ... 
& z_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterqpd_avx512f( m64, ZmmReg1 ); + vscatterqpd_avx512f( z_vm64z, ZmmOpMask, ZmmReg1 ); # TODO missing destination or side effects } +@endif # VSCATTERPF0DPS/VSCATTERPF0QPS/VSCATTERPF0DPD/VSCATTERPF0QPD 5-551 PAGE 2375 LINE 121759 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF0DPS vm32z {k1}" define pcodeop vscatterpf0dps_avx512pf ; -:VSCATTERPF0DPS m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC6; reg_opcode=5 ... & m32 +:VSCATTERPF0DPS z_vm32z ZmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0xC6; reg_opcode=5 ... & z_vm32z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf0dps_avx512pf( m32 ); + vscatterpf0dps_avx512pf( z_vm32z , ZmmOpMask); # TODO missing destination or side effects } +@ifdef IA64 # VSCATTERPF0DPS/VSCATTERPF0QPS/VSCATTERPF0DPD/VSCATTERPF0QPD 5-551 PAGE 2375 LINE 121762 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF0QPS vm64z {k1}" define pcodeop vscatterpf0qps_avx512pf ; -:VSCATTERPF0QPS m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC7; reg_opcode=5 ... & m64 +:VSCATTERPF0QPS z_vm64z ZmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0xC7; reg_opcode=5 ... & z_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf0qps_avx512pf( m64 ); + vscatterpf0qps_avx512pf( z_vm64z, ZmmOpMask ); # TODO missing destination or side effects } +@endif # VSCATTERPF0DPS/VSCATTERPF0QPS/VSCATTERPF0DPD/VSCATTERPF0QPD 5-551 PAGE 2375 LINE 121765 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF0DPD vm32y {k1}" define pcodeop vscatterpf0dpd_avx512pf ; -:VSCATTERPF0DPD m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC6; reg_opcode=5 ... 
& m32 +:VSCATTERPF0DPD y_vm32y YmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0xC6; reg_opcode=5 ... & y_vm32y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf0dpd_avx512pf( m32 ); + vscatterpf0dpd_avx512pf( y_vm32y, YmmOpMask ); # TODO missing destination or side effects } +@ifdef IA64 # VSCATTERPF0DPS/VSCATTERPF0QPS/VSCATTERPF0DPD/VSCATTERPF0QPD 5-551 PAGE 2375 LINE 121768 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF0QPD vm64z {k1}" define pcodeop vscatterpf0qpd_avx512pf ; -:VSCATTERPF0QPD m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC7; reg_opcode=5 ... & m64 +:VSCATTERPF0QPD z_vm64z ZmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0xC7; reg_opcode=5 ... & z_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf0qpd_avx512pf( m64 ); + vscatterpf0qpd_avx512pf( z_vm64z, ZmmOpMask ); # TODO missing destination or side effects } +@endif # VSCATTERPF1DPS/VSCATTERPF1QPS/VSCATTERPF1DPD/VSCATTERPF1QPD 5-553 PAGE 2377 LINE 121877 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF1DPS vm32z {k1}" define pcodeop vscatterpf1dps_avx512pf ; -:VSCATTERPF1DPS m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC6; reg_opcode=6 ... & m32 +:VSCATTERPF1DPS z_vm32z ZmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0xC6; reg_opcode=6 ... 
& z_vm32z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf1dps_avx512pf( m32 ); + vscatterpf1dps_avx512pf( z_vm32z, ZmmOpMask ); # TODO missing destination or side effects } +@ifdef IA64 # VSCATTERPF1DPS/VSCATTERPF1QPS/VSCATTERPF1DPD/VSCATTERPF1QPD 5-553 PAGE 2377 LINE 121880 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF1QPS vm64z {k1}" define pcodeop vscatterpf1qps_avx512pf ; -:VSCATTERPF1QPS m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0xC7; reg_opcode=6 ... & m64 +:VSCATTERPF1QPS z_vm64z ZmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W0) & ZmmOpMask; byte=0xC7; reg_opcode=6 ... & z_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf1qps_avx512pf( m64 ); + vscatterpf1qps_avx512pf( z_vm64z, ZmmOpMask ); # TODO missing destination or side effects } +@endif # VSCATTERPF1DPS/VSCATTERPF1QPS/VSCATTERPF1DPD/VSCATTERPF1QPD 5-553 PAGE 2377 LINE 121883 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF1DPD vm32y {k1}" define pcodeop vscatterpf1dpd_avx512pf ; -:VSCATTERPF1DPD m32 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC6; reg_opcode=6 ... & m32 +:VSCATTERPF1DPD y_vm32y YmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & YmmOpMask; byte=0xC6; reg_opcode=6 ... & y_vm32y [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf1dpd_avx512pf( m32 ); + vscatterpf1dpd_avx512pf( y_vm32y, YmmOpMask ); # TODO missing destination or side effects } +@ifdef IA64 # VSCATTERPF1DPS/VSCATTERPF1QPS/VSCATTERPF1DPD/VSCATTERPF1QPD 5-553 PAGE 2377 LINE 121886 -# WARNING: did not recognize qualifier /vsib for "VSCATTERPF1QPD vm64z {k1}" define pcodeop vscatterpf1qpd_avx512pf ; -:VSCATTERPF1QPD m64 KWriteMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & KWriteMask; byte=0xC7; reg_opcode=6 ... 
& m64 +:VSCATTERPF1QPD z_vm64z ZmmOpMask is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F38) & $(VEX_W1) & ZmmOpMask; byte=0xC7; reg_opcode=6 ... & z_vm64z [ evexD8Type = 1; evexTType = 3; ] # (TupleType T1S) { - vscatterpf1qpd_avx512pf( m64 ); + vscatterpf1qpd_avx512pf( z_vm64z, ZmmOpMask ); # TODO missing destination or side effects } +@endif # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 121994 define pcodeop vshuff32x4_avx512vl ; -:VSHUFF32X4 YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x23; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VSHUFF32X4 YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x23; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vshuff32x4_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vshuff32x4_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 121998 define pcodeop vshuff32x4_avx512f ; -:VSHUFF32x4 ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x23; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VSHUFF32x4 ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x23; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vshuff32x4_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vshuff32x4_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 122002 define pcodeop vshuff64x2_avx512vl ; -:VSHUFF64X2 YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x23; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VSHUFF64X2 YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x23; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vshuff64x2_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vshuff64x2_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 122006 define pcodeop vshuff64x2_avx512f ; -:VSHUFF64x2 ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x23; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VSHUFF64x2 ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x23; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vshuff64x2_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vshuff64x2_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 122010 define pcodeop vshufi32x4_avx512vl ; -:VSHUFI32X4 YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x43; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VSHUFI32X4 YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x43; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vshufi32x4_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vshufi32x4_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 122013 define pcodeop vshufi32x4_avx512f ; -:VSHUFI32x4 ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x43; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VSHUFI32x4 ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & evexV5_ZmmReg; byte=0x43; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vshufi32x4_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vshufi32x4_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 122016 define pcodeop vshufi64x2_avx512vl ; -:VSHUFI64X2 YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x43; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VSHUFI64X2 YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x43; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... & YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vshufi64x2_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vshufi64x2_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # VSHUFF32x4/VSHUFF64x2/VSHUFI32x4/VSHUFI64x2 5-555 PAGE 2379 LINE 122019 define pcodeop vshufi64x2_avx512f ; -:VSHUFI64x2 ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x43; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VSHUFI64x2 ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1) & evexV5_ZmmReg; byte=0x43; (ZmmReg1 & ZmmOpMask64) ... 
& ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vshufi64x2_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vshufi64x2_avx512f( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # XORPD 5-596 PAGE 2420 LINE 123834 define pcodeop vxorpd_avx512vl ; -:VXORPD XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m64bcst +:VXORPD XmmReg1 XmmOpMask64, vexVVVV_XmmReg, XmmReg2_m128_m64bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1 & XmmOpMask64) ... & XmmReg2_m128_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vxorpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vxorpd_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m64bcst ); + XmmMask = XmmReg1; + build XmmOpMask64; + ZmmReg1 = zext(XmmResult); } # XORPD 5-596 PAGE 2420 LINE 123837 -:VXORPD YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & vexVVVV_YmmReg; byte=0x57; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m64bcst +:VXORPD YmmReg1 YmmOpMask64, vexVVVV_YmmReg, YmmReg2_m256_m64bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vexVVVV_YmmReg; byte=0x57; (YmmReg1 & ZmmReg1 & YmmOpMask64) ... 
& YmmReg2_m256_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vxorpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vxorpd_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m64bcst ); + YmmMask = YmmReg1; + build YmmOpMask64; + ZmmReg1 = zext(YmmResult); } # XORPD 5-596 PAGE 2420 LINE 123840 define pcodeop vxorpd_avx512dq ; -:VXORPD ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & KWriteMask & evexV5_ZmmReg; byte=0x57; ZmmReg1 ... & ZmmReg2_m512_m64bcst +:VXORPD ZmmReg1 ZmmOpMask64, evexV5_ZmmReg, ZmmReg2_m512_m64bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & evexV5_ZmmReg; byte=0x57; (ZmmReg1 & ZmmOpMask64) ... & ZmmReg2_m512_m64bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vxorpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmResult = vxorpd_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m64bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask64; + ZmmReg1 = ZmmResult; } # XORPS 5-599 PAGE 2423 LINE 123959 define pcodeop vxorps_avx512vl ; -:VXORPS XmmReg1^KWriteMask, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1) ... & XmmReg2_m128_m32bcst +:VXORPS XmmReg1 XmmOpMask32, vexVVVV_XmmReg, XmmReg2_m128_m32bcst is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_XmmReg; byte=0x57; (XmmReg1 & ZmmReg1 & XmmOpMask32) ... 
& XmmReg2_m128_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:16 = vxorps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); - ZmmReg1 = zext(tmp); + XmmResult = vxorps_avx512vl( vexVVVV_XmmReg, XmmReg2_m128_m32bcst ); + XmmMask = XmmReg1; + build XmmOpMask32; + ZmmReg1 = zext(XmmResult); } # XORPS 5-599 PAGE 2423 LINE 123962 -:VXORPS YmmReg1^KWriteMask, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & vexVVVV_YmmReg; byte=0x57; (YmmReg1 & ZmmReg1) ... & YmmReg2_m256_m32bcst +:VXORPS YmmReg1 YmmOpMask32, vexVVVV_YmmReg, YmmReg2_m256_m32bcst is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vexVVVV_YmmReg; byte=0x57; (YmmReg1 & ZmmReg1 & YmmOpMask32) ... & YmmReg2_m256_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - local tmp:32 = vxorps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); - ZmmReg1 = zext(tmp); + YmmResult = vxorps_avx512vl( vexVVVV_YmmReg, YmmReg2_m256_m32bcst ); + YmmMask = YmmReg1; + build YmmOpMask32; + ZmmReg1 = zext(YmmResult); } # XORPS 5-599 PAGE 2423 LINE 123965 define pcodeop vxorps_avx512dq ; -:VXORPS ZmmReg1^KWriteMask, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & KWriteMask & evexV5_ZmmReg; byte=0x57; ZmmReg1 ... & ZmmReg2_m512_m32bcst +:VXORPS ZmmReg1 ZmmOpMask32, evexV5_ZmmReg, ZmmReg2_m512_m32bcst is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & evexV5_ZmmReg; byte=0x57; (ZmmReg1 & ZmmOpMask32) ... 
& ZmmReg2_m512_m32bcst [ evexD8Type = 0; evexTType = 0; ] # (TupleType FV) { - ZmmReg1 = vxorps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmResult = vxorps_avx512dq( evexV5_ZmmReg, ZmmReg2_m512_m32bcst ); + ZmmMask = ZmmReg1; + build ZmmOpMask32; + ZmmReg1 = ZmmResult; } diff --git a/Ghidra/Processors/x86/data/languages/avx512_manual.sinc b/Ghidra/Processors/x86/data/languages/avx512_manual.sinc index c14f24e143..3c181c7b66 100644 --- a/Ghidra/Processors/x86/data/languages/avx512_manual.sinc +++ b/Ghidra/Processors/x86/data/languages/avx512_manual.sinc @@ -1,71 +1,147 @@ +# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55984 +:KADDW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm +{ + local tmp:2 = vex1VVV_KReg[0,16] + KReg_rm[0,16]; + KReg_reg = zext(tmp); +} + +# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55986 +:KADDB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm +{ + local tmp:1 = vex1VVV_KReg[0,8] + KReg_rm[0,8]; + KReg_reg = zext(tmp); +} + +# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55988 +:KADDQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm +{ + local tmp:8 = vex1VVV_KReg[0,64] + KReg_rm[0,64]; + KReg_reg = zext(tmp); +} + +# KADDW/KADDB/KADDQ/KADDD 3-496 PAGE 1066 LINE 55990 +:KADDD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x4A; KReg_reg & KReg_rm +{ + local tmp:4 = vex1VVV_KReg[0,32] + KReg_rm[0,32]; + KReg_reg = zext(tmp); +} + +# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56039 +:KANDW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm +{ + local tmp:2 = 
vex1VVV_KReg[0,16] & KReg_rm[0,16]; + KReg_reg = zext(tmp); +} + +# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56041 +:KANDB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm +{ + local tmp:1 = vex1VVV_KReg[0,8] & KReg_rm[0,8]; + KReg_reg = zext(tmp); +} + +# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56043 +:KANDQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm +{ + local tmp:8 = vex1VVV_KReg[0,64] & KReg_rm[0,64]; + KReg_reg = zext(tmp); +} + +# KANDW/KANDB/KANDQ/KANDD 3-497 PAGE 1067 LINE 56045 +:KANDD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x41; KReg_reg & KReg_rm +{ + local tmp:4 = vex1VVV_KReg[0,32] & KReg_rm[0,32]; + KReg_reg = zext(tmp); +} + +# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56100 +:KANDNW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm +{ + local tmp:2 = ~vex1VVV_KReg[0,16] & KReg_rm[0,16]; + KReg_reg = zext(tmp); +} + +# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56102 +:KANDNB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm +{ + local tmp:1 = ~vex1VVV_KReg[0,8] & KReg_rm[0,8]; + KReg_reg = zext(tmp); +} + +# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56104 +:KANDNQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm +{ + local tmp:8 = ~vex1VVV_KReg[0,64] & KReg_rm[0,64]; + KReg_reg = zext(tmp); +} + +# KANDNW/KANDNB/KANDNQ/KANDND 3-498 PAGE 1068 LINE 56106 +:KANDND KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & 
$(VEX_W1) & vex1VVV_KReg; byte=0x42; KReg_reg & KReg_rm +{ + local tmp:4 = ~vex1VVV_KReg[0,32] & KReg_rm[0,32]; + KReg_reg = zext(tmp); +} + + # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56160 -define pcodeop kmovw_avx512f ; :KMOVW KReg_reg, RegK_m16 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x90; KReg_reg ... & RegK_m16 { - KReg_reg = RegK_m16; + KReg_reg = zext(RegK_m16); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56162 -define pcodeop kmovb_avx512dq ; :KMOVB KReg_reg, RegK_m8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x90; KReg_reg ... & RegK_m8 { - KReg_reg = RegK_m8; + KReg_reg = zext(RegK_m8); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56164 -define pcodeop kmovq_avx512bw ; :KMOVQ KReg_reg, RegK_m64 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x90; KReg_reg ... & RegK_m64 { - KReg_reg = RegK_m64 ; + KReg_reg = zext(RegK_m64); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56166 -define pcodeop kmovd_avx512bw ; :KMOVD KReg_reg, RegK_m32 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x90; KReg_reg ... & RegK_m32 { - KReg_reg = RegK_m32; + KReg_reg = zext(RegK_m32); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56168 :KMOVW m16, KReg_reg is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x91; KReg_reg ... & m16 { - local tmp:2 = KReg_reg(0); - m16 = tmp; + m16 = KReg_reg[0,16]; } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56170 :KMOVB m8, KReg_reg is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x91; KReg_reg ... & m8 { - local tmp:1 = KReg_reg(0); - m8 = tmp; + m8 = KReg_reg[0,8]; } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56172 :KMOVQ m64, KReg_reg is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x91; KReg_reg ... 
& m64 { - m64 = KReg_reg; + m64 = KReg_reg[0,64]; } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56174 :KMOVD m32, KReg_reg is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x91; KReg_reg ... & m32 { - local tmp:4 = KReg_reg(0); - m32 = tmp; + m32 = KReg_reg[0,32]; } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56176 :KMOVW KReg_reg, Rmr32 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x92; mod=3 & Rmr32 &KReg_reg { - local tmp:2 = Rmr32(0); - KReg_reg = zext(tmp); + KReg_reg = zext(Rmr32[0,16]); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56178 :KMOVB KReg_reg, Rmr32 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x92; mod=3 & Rmr32 & KReg_reg { - local tmp:1 = Rmr32(0); - KReg_reg = zext(tmp); + KReg_reg = zext(Rmr32[0,8]); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56180 @@ -85,805 +161,1425 @@ define pcodeop kmovd_avx512bw ; # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56184 :KMOVW Reg32, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x93; Reg32 & KReg_rm { - Reg32 = KReg_rm(0); + Reg32 = zext(KReg_rm[0,16]); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56186 :KMOVB Reg32, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x93; Reg32 & KReg_rm { - Reg32 = KReg_rm(0); + Reg32 = zext(KReg_rm[0,8]); } # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56188 @ifdef IA64 :KMOVQ Reg64, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W1); byte=0x93; Reg64 & KReg_rm { - Reg64 = KReg_rm; + Reg64 = KReg_rm[0,64]; } @endif # KMOVW/KMOVB/KMOVQ/KMOVD 3-499 PAGE 1069 LINE 56190 :KMOVD Reg32, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_F2) & $(VEX_0F) & $(VEX_W0); byte=0x93; Reg32 & KReg_rm { - Reg32 = KReg_rm(0); + Reg32 = KReg_rm[0,32]; +} + +# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56266 +:KNOTW KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & 
$(VEX_0F) & $(VEX_W0); byte=0x44; KReg_reg & KReg_rm +{ + KReg_reg = zext(~KReg_rm[0,16]); +} + +# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56268 +:KNOTB KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x44; KReg_reg & KReg_rm +{ + KReg_reg = zext(~KReg_rm[0,8]); +} + +# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56270 +:KNOTQ KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x44; KReg_reg & KReg_rm +{ + KReg_reg = zext(~KReg_rm[0,64]); +} + +# KNOTW/KNOTB/KNOTQ/KNOTD 3-501 PAGE 1071 LINE 56272 +:KNOTD KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x44; KReg_reg & KReg_rm +{ + KReg_reg = zext(~KReg_rm[0,32]); +} + +# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56325 +:KORW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm +{ + local tmp:2 = vex1VVV_KReg[0,16] | KReg_rm[0,16]; + KReg_reg = zext(tmp); +} + +# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56327 +:KORB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm +{ + local tmp:1 = vex1VVV_KReg[0,8] | KReg_rm[0,8]; + KReg_reg = zext(tmp); +} + +# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56329 +:KORQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm +{ + local tmp:8 = vex1VVV_KReg[0,64] | KReg_rm[0,64]; + KReg_reg = zext(tmp); +} + +# KORW/KORB/KORQ/KORD 3-502 PAGE 1072 LINE 56331 +:KORD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x45; KReg_reg & KReg_rm +{ + local tmp:4 = vex1VVV_KReg[0,32] | KReg_rm[0,32]; + KReg_reg = zext(tmp); +} + +# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56385 +:KORTESTW 
KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x98; KReg_reg & KReg_rm +{ + local tmp:2 = KReg_reg[0,16] | KReg_rm[0,16]; + ZF = (tmp == 0); + CF = (tmp == 0xffff); +} + +# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56387 +:KORTESTB KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x98; KReg_reg & KReg_rm +{ + local tmp:1 = KReg_reg[0,8] | KReg_rm[0,8]; + ZF = (tmp == 0); + CF = (tmp == 0xff); +} + +# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56389 +:KORTESTQ KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x98; KReg_reg & KReg_rm +{ + local tmp:8 = KReg_reg[0,64] | KReg_rm[0,64]; + ZF = (tmp == 0); + CF = (tmp == 0xffffffffffffffff); +} + +# KORTESTW/KORTESTB/KORTESTQ/KORTESTD 3-503 PAGE 1073 LINE 56391 +:KORTESTD KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x98; KReg_reg & KReg_rm +{ + local tmp:4 = KReg_reg[0,32] | KReg_rm[0,32]; + ZF = (tmp == 0); + CF = (tmp == 0xffffffff); +} + +# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56481 +:KSHIFTLW KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x32; KReg_reg & KReg_rm; imm8 +{ + local tmp:2 = KReg_rm[0,16] << imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56483 +:KSHIFTLB KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x32; KReg_reg & KReg_rm; imm8 +{ + local tmp:1 = KReg_rm[0,8] << imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 3-505 PAGE 1075 LINE 56485 +:KSHIFTLQ KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x33; KReg_reg & KReg_rm; imm8 +{ + local tmp:8 = KReg_rm[0,64] << imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTLW/KSHIFTLB/KSHIFTLQ/KSHIFTLD 
3-505 PAGE 1075 LINE 56487 +:KSHIFTLD KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x33; KReg_reg & KReg_rm; imm8 +{ + local tmp:4 = KReg_rm[0,32] << imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56562 +:KSHIFTRW KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x30; KReg_reg & KReg_rm; imm8 +{ + local tmp:2 = KReg_rm[0,16] >> imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56564 +:KSHIFTRB KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x30; KReg_reg & KReg_rm; imm8 +{ + local tmp:1 = KReg_rm[0,8] >> imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56566 +:KSHIFTRQ KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W1); byte=0x31; KReg_reg & KReg_rm; imm8 +{ + local tmp:8 = KReg_rm[0,64] >> imm8:1; + KReg_reg = zext(tmp); +} + +# KSHIFTRW/KSHIFTRB/KSHIFTRQ/KSHIFTRD 3-507 PAGE 1077 LINE 56568 +:KSHIFTRD KReg_reg, KReg_rm, imm8 is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0); byte=0x31; KReg_reg & KReg_rm; imm8 +{ + local tmp:4 = KReg_rm[0,32] >> imm8:1; + KReg_reg = zext(tmp); +} + +# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56643 +:KTESTW KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0); byte=0x99; KReg_reg & KReg_rm +{ + local tmp:2 = KReg_reg[0,16] & KReg_rm[0,16]; + ZF = (tmp == 0); + tmp = KReg_reg[0,16] & ~KReg_rm[0,16]; + CF = (tmp == 0); + AF = 0; + OF = 0; + PF = 0; + SF = 0; +} + +# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56645 +:KTESTB KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0); byte=0x99; KReg_reg & KReg_rm +{ + local tmp:1 = KReg_reg[0,8] & KReg_rm[0,8]; + ZF = (tmp == 0); + tmp =
KReg_reg[0,8] & ~KReg_rm[0,8]; + CF = (tmp == 0); + AF = 0; + OF = 0; + PF = 0; + SF = 0; +} + +# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56647 +:KTESTQ KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1); byte=0x99; KReg_reg & KReg_rm +{ + local tmp:8 = KReg_reg[0,64] & KReg_rm[0,64]; + ZF = (tmp == 0); + tmp = KReg_reg[0,64] & ~KReg_rm[0,64]; + CF = (tmp == 0); + AF = 0; + OF = 0; + PF = 0; + SF = 0; +} + +# KTESTW/KTESTB/KTESTQ/KTESTD 3-509 PAGE 1079 LINE 56649 +:KTESTD KReg_reg, KReg_rm is $(VEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1); byte=0x99; KReg_reg & KReg_rm +{ + local tmp:4 = KReg_reg[0,32] & KReg_rm[0,32]; + ZF = (tmp == 0); + tmp = KReg_reg[0,32] & ~KReg_rm[0,32]; + CF = (tmp == 0); + AF = 0; + OF = 0; + PF = 0; + SF = 0; +} + +# KUNPCKBW/KUNPCKWD/KUNPCKDQ 3-511 PAGE 1081 LINE 56747 +:KUNPCKBW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4B; KReg_reg & KReg_rm +{ + local src1:1 = vex1VVV_KReg[0,8]; + local src2:1 = KReg_rm[0,8]; + KReg_reg = 0; + KReg_reg[0,8] = src2; + KReg_reg[8,8] = src1; +} + +# KUNPCKBW/KUNPCKWD/KUNPCKDQ 3-511 PAGE 1081 LINE 56749 +:KUNPCKWD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x4B; KReg_reg & KReg_rm +{ + local src1:2 = vex1VVV_KReg[0,16]; + local src2:2 = KReg_rm[0,16]; + KReg_reg = 0; + KReg_reg[0,16] = src2; + KReg_reg[16,16] = src1; +} + +# KUNPCKBW/KUNPCKWD/KUNPCKDQ 3-511 PAGE 1081 LINE 56751 +:KUNPCKDQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x4B; KReg_reg & KReg_rm +{ + local src1:4 = vex1VVV_KReg[0,32]; + local src2:4 = KReg_rm[0,32]; + KReg_reg = 0; + KReg_reg[0,32] = src2; + KReg_reg[32,32] = src1; +} + +# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56806 +:KXNORW KReg_reg, vex1VVV_KReg, KReg_rm is 
$(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm +{ + local tmp:2 = ~(vex1VVV_KReg[0,16] ^ KReg_rm[0,16]); + KReg_reg = zext(tmp); +} + +# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56808 +:KXNORB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm +{ + local tmp:1 = ~(vex1VVV_KReg[0,8] ^ KReg_rm[0,8]); + KReg_reg = zext(tmp); +} + +# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56810 +:KXNORQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm +{ + local tmp:8 = ~(vex1VVV_KReg[0,64] ^ KReg_rm[0,64]); + KReg_reg = zext(tmp); +} + +# KXNORW/KXNORB/KXNORQ/KXNORD 3-512 PAGE 1082 LINE 56812 +:KXNORD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x46; KReg_reg & KReg_rm +{ + local tmp:4 = ~(vex1VVV_KReg[0,32] ^ KReg_rm[0,32]); + KReg_reg = zext(tmp); +} + +# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56866 +:KXORW KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NDS) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm +{ + local tmp:2 = vex1VVV_KReg[0,16] ^ KReg_rm[0,16]; + KReg_reg = zext(tmp); +} + +# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56868 +:KXORB KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W0) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm +{ + local tmp:1 = vex1VVV_KReg[0,8] ^ KReg_rm[0,8]; + KReg_reg = zext(tmp); +} + +# KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56870 +:KXORQ KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_NONE) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm +{ + local tmp:8 = vex1VVV_KReg[0,64] ^ KReg_rm[0,64]; + KReg_reg = zext(tmp); +} + +# 
KXORW/KXORB/KXORQ/KXORD 3-513 PAGE 1083 LINE 56872 +:KXORD KReg_reg, vex1VVV_KReg, KReg_rm is $(VEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F) & $(VEX_W1) & vex1VVV_KReg; byte=0x47; KReg_reg & KReg_rm +{ + local tmp:4 = vex1VVV_KReg[0,32] ^ KReg_rm[0,32]; + KReg_reg = zext(tmp); } # VCVTPS2PH 5-37 PAGE 1861 LINE 96116 define pcodeop vcvtps2ph_avx512vl ; -:VCVTPS2PH XmmReg2^KWriteMask, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1D; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2; imm8 +:VCVTPS2PH XmmReg2 XmmOpMask, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask; byte=0x1D; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2; imm8 { - XmmReg2 = vcvtps2ph_avx512vl( XmmReg1, imm8:1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vcvtps2ph_avx512vl( XmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask; + XmmResult[0,16] = (zext(XmmOpMask[0,1]) * XmmResult[0,16]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,16]); + XmmResult[16,16] = (zext(XmmOpMask[1,1]) * XmmResult[16,16]) + (zext(!XmmOpMask[1,1]) * XmmMask[16,16]); + XmmResult[32,16] = (zext(XmmOpMask[2,1]) * XmmResult[32,16]) + (zext(!XmmOpMask[2,1]) * XmmMask[32,16]); + XmmResult[48,16] = (zext(XmmOpMask[3,1]) * XmmResult[48,16]) + (zext(!XmmOpMask[3,1]) * XmmMask[48,16]); + ZmmReg2 = zext(XmmResult[0,64]); } -:VCVTPS2PH m64^KWriteMask, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1D; XmmReg1 ... & m64; imm8 +:VCVTPS2PH m64 XmmOpMask, XmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask; byte=0x1D; XmmReg1 ... 
& m64; imm8 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vcvtps2ph_avx512vl( XmmReg1, imm8:1 ); + XmmResult = vcvtps2ph_avx512vl( XmmReg1, imm8:1 ); + XmmMask = zext(m64); + build XmmOpMask; + XmmResult[0,16] = (zext(XmmOpMask[0,1]) * XmmResult[0,16]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,16]); + XmmResult[16,16] = (zext(XmmOpMask[1,1]) * XmmResult[16,16]) + (zext(!XmmOpMask[1,1]) * XmmMask[16,16]); + XmmResult[32,16] = (zext(XmmOpMask[2,1]) * XmmResult[32,16]) + (zext(!XmmOpMask[2,1]) * XmmMask[32,16]); + XmmResult[48,16] = (zext(XmmOpMask[3,1]) * XmmResult[48,16]) + (zext(!XmmOpMask[3,1]) * XmmMask[48,16]); + m64 = XmmResult[0,64]; } # VCVTPS2PH 5-37 PAGE 1861 LINE 96119 -:VCVTPS2PH XmmReg2^KWriteMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1D; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2; imm8 +:VCVTPS2PH XmmReg2 XmmOpMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask; byte=0x1D; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2; imm8 { - XmmReg2 = vcvtps2ph_avx512vl( YmmReg1, imm8:1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vcvtps2ph_avx512vl( YmmReg1, imm8:1 ); + XmmMask = XmmReg2; + build XmmOpMask; + XmmResult[0,16] = (zext(XmmOpMask[0,1]) * XmmResult[0,16]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,16]); + XmmResult[16,16] = (zext(XmmOpMask[1,1]) * XmmResult[16,16]) + (zext(!XmmOpMask[1,1]) * XmmMask[16,16]); + XmmResult[32,16] = (zext(XmmOpMask[2,1]) * XmmResult[32,16]) + (zext(!XmmOpMask[2,1]) * XmmMask[32,16]); + XmmResult[48,16] = (zext(XmmOpMask[3,1]) * XmmResult[48,16]) + (zext(!XmmOpMask[3,1]) * XmmMask[48,16]); + XmmResult[64,16] = (zext(XmmOpMask[4,1]) * XmmResult[64,16]) + (zext(!XmmOpMask[4,1]) * XmmMask[64,16]); + XmmResult[80,16] = (zext(XmmOpMask[5,1]) * XmmResult[80,16]) + (zext(!XmmOpMask[5,1]) * XmmMask[80,16]); + XmmResult[96,16] = (zext(XmmOpMask[6,1]) * XmmResult[96,16]) + (zext(!XmmOpMask[6,1]) * XmmMask[96,16]); + 
XmmResult[112,16] = (zext(XmmOpMask[7,1]) * XmmResult[112,16]) + (zext(!XmmOpMask[7,1]) * XmmMask[112,16]); + ZmmReg2 = zext(XmmResult[0,64]); } # VCVTPS2PH 5-37 PAGE 1861 LINE 96119 -:VCVTPS2PH m128^KWriteMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1D; YmmReg1 ... & m128; imm8 +:VCVTPS2PH m128 XmmOpMask, YmmReg1, imm8 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & XmmOpMask; byte=0x1D; YmmReg1 ... & m128; imm8 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vcvtps2ph_avx512vl( YmmReg1, imm8:1 ); + XmmResult = vcvtps2ph_avx512vl( YmmReg1, imm8:1 ); + XmmMask = m128; + build XmmOpMask; + XmmResult[0,16] = (zext(XmmOpMask[0,1]) * XmmResult[0,16]) + (zext(!XmmOpMask[0,1]) * XmmMask[0,16]); + XmmResult[16,16] = (zext(XmmOpMask[1,1]) * XmmResult[16,16]) + (zext(!XmmOpMask[1,1]) * XmmMask[16,16]); + XmmResult[32,16] = (zext(XmmOpMask[2,1]) * XmmResult[32,16]) + (zext(!XmmOpMask[2,1]) * XmmMask[32,16]); + XmmResult[48,16] = (zext(XmmOpMask[3,1]) * XmmResult[48,16]) + (zext(!XmmOpMask[3,1]) * XmmMask[48,16]); + XmmResult[64,16] = (zext(XmmOpMask[4,1]) * XmmResult[64,16]) + (zext(!XmmOpMask[4,1]) * XmmMask[64,16]); + XmmResult[80,16] = (zext(XmmOpMask[5,1]) * XmmResult[80,16]) + (zext(!XmmOpMask[5,1]) * XmmMask[80,16]); + XmmResult[96,16] = (zext(XmmOpMask[6,1]) * XmmResult[96,16]) + (zext(!XmmOpMask[6,1]) * XmmMask[96,16]); + XmmResult[112,16] = (zext(XmmOpMask[7,1]) * XmmResult[112,16]) + (zext(!XmmOpMask[7,1]) * XmmMask[112,16]); + m128 = XmmResult; } # VCVTPS2PH 5-37 PAGE 1861 LINE 96122 define pcodeop vcvtps2ph_avx512f ; -:VCVTPS2PH YmmReg2^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1D; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2; imm8 +:VCVTPS2PH YmmReg2 YmmOpMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & YmmOpMask; byte=0x1D; mod=3 & ZmmReg1 
& YmmReg2 & ZmmReg2; imm8 { - YmmReg2 = vcvtps2ph_avx512f( ZmmReg1, imm8:1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vcvtps2ph_avx512f( ZmmReg1, imm8:1 ); + YmmMask = YmmReg2; + build YmmOpMask; + YmmResult[0,16] = (zext(YmmOpMask[0,1]) * YmmResult[0,16]) + (zext(!YmmOpMask[0,1]) * YmmMask[0,16]); + YmmResult[16,16] = (zext(YmmOpMask[1,1]) * YmmResult[16,16]) + (zext(!YmmOpMask[1,1]) * YmmMask[16,16]); + YmmResult[32,16] = (zext(YmmOpMask[2,1]) * YmmResult[32,16]) + (zext(!YmmOpMask[2,1]) * YmmMask[32,16]); + YmmResult[48,16] = (zext(YmmOpMask[3,1]) * YmmResult[48,16]) + (zext(!YmmOpMask[3,1]) * YmmMask[48,16]); + YmmResult[64,16] = (zext(YmmOpMask[4,1]) * YmmResult[64,16]) + (zext(!YmmOpMask[4,1]) * YmmMask[64,16]); + YmmResult[80,16] = (zext(YmmOpMask[5,1]) * YmmResult[80,16]) + (zext(!YmmOpMask[5,1]) * YmmMask[80,16]); + YmmResult[96,16] = (zext(YmmOpMask[6,1]) * YmmResult[96,16]) + (zext(!YmmOpMask[6,1]) * YmmMask[96,16]); + YmmResult[112,16] = (zext(YmmOpMask[7,1]) * YmmResult[112,16]) + (zext(!YmmOpMask[7,1]) * YmmMask[112,16]); + YmmResult[128,16] = (zext(YmmOpMask[8,1]) * YmmResult[128,16]) + (zext(!YmmOpMask[8,1]) * YmmMask[128,16]); + YmmResult[144,16] = (zext(YmmOpMask[9,1]) * YmmResult[144,16]) + (zext(!YmmOpMask[9,1]) * YmmMask[144,16]); + YmmResult[160,16] = (zext(YmmOpMask[10,1]) * YmmResult[160,16]) + (zext(!YmmOpMask[10,1]) * YmmMask[160,16]); + YmmResult[176,16] = (zext(YmmOpMask[11,1]) * YmmResult[176,16]) + (zext(!YmmOpMask[11,1]) * YmmMask[176,16]); + YmmResult[192,16] = (zext(YmmOpMask[12,1]) * YmmResult[192,16]) + (zext(!YmmOpMask[12,1]) * YmmMask[192,16]); + YmmResult[208,16] = (zext(YmmOpMask[13,1]) * YmmResult[208,16]) + (zext(!YmmOpMask[13,1]) * YmmMask[208,16]); + YmmResult[224,16] = (zext(YmmOpMask[14,1]) * YmmResult[224,16]) + (zext(!YmmOpMask[14,1]) * YmmMask[224,16]); + YmmResult[240,16] = (zext(YmmOpMask[15,1]) * YmmResult[240,16]) + (zext(!YmmOpMask[15,1]) * YmmMask[240,16]); + ZmmReg2 = zext(YmmResult); } -:VCVTPS2PH 
m256^KWriteMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & KWriteMask; byte=0x1D; ZmmReg1 ... & m256; imm8 +:VCVTPS2PH m256 YmmOpMask, ZmmReg1, imm8 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_66) & $(VEX_0F3A) & $(VEX_W0) & YmmOpMask; byte=0x1D; ZmmReg1 ... & m256; imm8 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vcvtps2ph_avx512f( ZmmReg1, imm8:1 ); + YmmResult = vcvtps2ph_avx512f( ZmmReg1, imm8:1 ); + YmmMask = m256; + build YmmOpMask; + YmmResult[0,16] = (zext(YmmOpMask[0,1]) * YmmResult[0,16]) + (zext(!YmmOpMask[0,1]) * YmmMask[0,16]); + YmmResult[16,16] = (zext(YmmOpMask[1,1]) * YmmResult[16,16]) + (zext(!YmmOpMask[1,1]) * YmmMask[16,16]); + YmmResult[32,16] = (zext(YmmOpMask[2,1]) * YmmResult[32,16]) + (zext(!YmmOpMask[2,1]) * YmmMask[32,16]); + YmmResult[48,16] = (zext(YmmOpMask[3,1]) * YmmResult[48,16]) + (zext(!YmmOpMask[3,1]) * YmmMask[48,16]); + YmmResult[64,16] = (zext(YmmOpMask[4,1]) * YmmResult[64,16]) + (zext(!YmmOpMask[4,1]) * YmmMask[64,16]); + YmmResult[80,16] = (zext(YmmOpMask[5,1]) * YmmResult[80,16]) + (zext(!YmmOpMask[5,1]) * YmmMask[80,16]); + YmmResult[96,16] = (zext(YmmOpMask[6,1]) * YmmResult[96,16]) + (zext(!YmmOpMask[6,1]) * YmmMask[96,16]); + YmmResult[112,16] = (zext(YmmOpMask[7,1]) * YmmResult[112,16]) + (zext(!YmmOpMask[7,1]) * YmmMask[112,16]); + YmmResult[128,16] = (zext(YmmOpMask[8,1]) * YmmResult[128,16]) + (zext(!YmmOpMask[8,1]) * YmmMask[128,16]); + YmmResult[144,16] = (zext(YmmOpMask[9,1]) * YmmResult[144,16]) + (zext(!YmmOpMask[9,1]) * YmmMask[144,16]); + YmmResult[160,16] = (zext(YmmOpMask[10,1]) * YmmResult[160,16]) + (zext(!YmmOpMask[10,1]) * YmmMask[160,16]); + YmmResult[176,16] = (zext(YmmOpMask[11,1]) * YmmResult[176,16]) + (zext(!YmmOpMask[11,1]) * YmmMask[176,16]); + YmmResult[192,16] = (zext(YmmOpMask[12,1]) * YmmResult[192,16]) + (zext(!YmmOpMask[12,1]) * YmmMask[192,16]); + YmmResult[208,16] = (zext(YmmOpMask[13,1]) * YmmResult[208,16]) + 
(zext(!YmmOpMask[13,1]) * YmmMask[208,16]); + YmmResult[224,16] = (zext(YmmOpMask[14,1]) * YmmResult[224,16]) + (zext(!YmmOpMask[14,1]) * YmmMask[224,16]); + YmmResult[240,16] = (zext(YmmOpMask[15,1]) * YmmResult[240,16]) + (zext(!YmmOpMask[15,1]) * YmmMask[240,16]); + m256 = YmmResult; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115319 define pcodeop vpmovdb_avx512vl ; -:VPMOVDB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x31; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVDB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x31; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovdb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovdb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVDB m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x31; XmmReg1 ... & m32 +:VPMOVDB m32 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x31; XmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m32 = vpmovdb_avx512vl( XmmReg1 ); + XmmResult = vpmovdb_avx512vl( XmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask8; + m32 = XmmResult[0,32]; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115322 define pcodeop vpmovsdb_avx512vl ; -:VPMOVSDB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x21; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSDB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x21; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsdb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsdb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVSDB m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x21; XmmReg1 ... & m32 +:VPMOVSDB m32 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x21; XmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m32 = vpmovsdb_avx512vl( XmmReg1 ); + XmmResult = vpmovsdb_avx512vl( XmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask8; + m32 = XmmResult[0,32]; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115326 define pcodeop vpmovusdb_avx512vl ; -:VPMOVUSDB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x11; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSDB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x11; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusdb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusdb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVUSDB m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x11; XmmReg1 ... & m32 +:VPMOVUSDB m32 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x11; XmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m32 = vpmovusdb_avx512vl( XmmReg1 ); + XmmResult = vpmovusdb_avx512vl( XmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask8; + m32 = XmmResult[0,32]; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115330 -:VPMOVDB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x31; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVDB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x31; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovdb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovdb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVDB m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x31; YmmReg1 ... & m64 +:VPMOVDB m64 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x31; YmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m64 = vpmovdb_avx512vl( YmmReg1 ); + XmmResult = vpmovdb_avx512vl( YmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115333 -:VPMOVSDB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x21; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSDB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x21; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsdb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsdb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVSDB m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x21; YmmReg1 ... & m64 +:VPMOVSDB m64 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x21; YmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m64 = vpmovsdb_avx512vl( YmmReg1 ); + XmmResult = vpmovsdb_avx512vl( YmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115337 -:VPMOVUSDB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x11; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSDB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x11; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusdb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusdb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVUSDB m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x11; YmmReg1 ... & m64 +:VPMOVUSDB m64 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x11; YmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m64 = vpmovusdb_avx512vl( YmmReg1 ); + XmmResult = vpmovusdb_avx512vl( YmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115341 define pcodeop vpmovdb_avx512f ; -:VPMOVDB XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x31; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVDB XmmReg2 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x31; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovdb_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovdb_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult); } -:VPMOVDB m128^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x31; ZmmReg1 ... & m128 +:VPMOVDB m128 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x31; ZmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m128 = vpmovdb_avx512f( ZmmReg1 ); + XmmResult = vpmovdb_avx512f( ZmmReg1 ); + XmmMask = m128; + build XmmOpMask8; + m128 = XmmResult; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115344 define pcodeop vpmovsdb_avx512f ; -:VPMOVSDB XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x21; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSDB XmmReg2 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x21; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsdb_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsdb_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult); } -:VPMOVSDB m128^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x21; ZmmReg1 ... & m128 +:VPMOVSDB m128 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x21; ZmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m128 = vpmovsdb_avx512f( ZmmReg1 ); + XmmResult = vpmovsdb_avx512f( ZmmReg1 ); + XmmMask = m128; + build XmmOpMask8; + m128 = XmmResult; } # VPMOVDB/VPMOVSDB/VPMOVUSDB 5-418 PAGE 2242 LINE 115348 define pcodeop vpmovusdb_avx512f ; -:VPMOVUSDB XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x11; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSDB XmmReg2 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x11; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusdb_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusdb_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult); } -:VPMOVUSDB m128^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x11; ZmmReg1 ... & m128 +:VPMOVUSDB m128 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x11; ZmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m128 = vpmovusdb_avx512f( ZmmReg1 ); + XmmResult = vpmovusdb_avx512f( ZmmReg1 ); + XmmMask = m128; + build XmmOpMask8; + m128 = XmmResult; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115532 define pcodeop vpmovdw_avx512vl ; -:VPMOVDW XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x33; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVDW XmmReg2 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x33; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovdw_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovdw_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVDW m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x33; XmmReg1 ... & m64 +:VPMOVDW m64 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x33; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovdw_avx512vl( XmmReg1 ); + XmmResult = vpmovdw_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask16; + m64 = XmmResult[0,64]; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115535 define pcodeop vpmovsdw_avx512vl ; -:VPMOVSDW XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x23; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSDW XmmReg2 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x23; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsdw_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsdw_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVSDW m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x23; XmmReg1 ... & m64 +:VPMOVSDW m64 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x23; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovsdw_avx512vl( XmmReg1 ); + XmmResult = vpmovsdw_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask16; + m64 = XmmResult[0,64]; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115539 define pcodeop vpmovusdw_avx512vl ; -:VPMOVUSDW XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSDW XmmReg2 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x13; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusdw_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusdw_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVUSDW m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; XmmReg1 ... & m64 +:VPMOVUSDW m64 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x13; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovusdw_avx512vl( XmmReg1 ); + XmmResult = vpmovusdw_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask16; + m64 = XmmResult[0,64]; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115543 -:VPMOVDW XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x33; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVDW XmmReg2 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x33; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovdw_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovdw_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult); } -:VPMOVDW m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x33; YmmReg1 ... & m128 +:VPMOVDW m128 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x33; YmmReg1 ...
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovdw_avx512vl( YmmReg1 ); + XmmResult = vpmovdw_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask16; + m128 = XmmResult; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115546 -:VPMOVSDW XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x23; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSDW XmmReg2 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x23; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsdw_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsdw_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult); } -:VPMOVSDW m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x23; YmmReg1 ... & m128 +:VPMOVSDW m128 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x23; YmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovsdw_avx512vl( YmmReg1 ); + XmmResult = vpmovsdw_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask16; + m128 = XmmResult; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115550 -:VPMOVUSDW XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSDW XmmReg2 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x13; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusdw_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusdw_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult); } -:VPMOVUSDW m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; YmmReg1 ... & m128 +:VPMOVUSDW m128 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x13; YmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovusdw_avx512vl( YmmReg1 ); + XmmResult = vpmovusdw_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask16; + m128 = XmmResult; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115554 define pcodeop vpmovdw_avx512f ; -:VPMOVDW YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x33; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVDW YmmReg2 YmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask16; byte=0x33; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovdw_avx512f( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovdw_avx512f( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask16; + ZmmReg2 = zext(YmmResult); } -:VPMOVDW m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x33; ZmmReg1 ... & m256 +:VPMOVDW m256 YmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask16; byte=0x33; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovdw_avx512f( ZmmReg1 ); + YmmResult = vpmovdw_avx512f( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask16; + m256 = YmmResult; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115557 define pcodeop vpmovsdw_avx512f ; -:VPMOVSDW YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x23; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVSDW YmmReg2 YmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask16; byte=0x23; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovsdw_avx512f( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovsdw_avx512f( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask16; + ZmmReg2 = zext(YmmResult); } -:VPMOVSDW m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x23; ZmmReg1 ... & m256 +:VPMOVSDW m256 YmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask16; byte=0x23; ZmmReg1 ...
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovsdw_avx512f( ZmmReg1 ); + YmmResult = vpmovsdw_avx512f( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask16; + m256 = YmmResult; } # VPMOVDW/VPMOVSDW/VPMOVUSDW 5-422 PAGE 2246 LINE 115561 define pcodeop vpmovusdw_avx512f ; -:VPMOVUSDW YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVUSDW YmmReg2 YmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask16; byte=0x13; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovusdw_avx512f( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovusdw_avx512f( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask16; + ZmmReg2 = zext(YmmResult); } -:VPMOVUSDW m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x13; ZmmReg1 ... & m256 +:VPMOVUSDW m256 YmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask16; byte=0x13; ZmmReg1 ...
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovusdw_avx512f( ZmmReg1 ); + YmmResult = vpmovusdw_avx512f( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask16; + m256 = YmmResult; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114671 define pcodeop vpmovqb_avx512vl ; -:VPMOVQB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x32; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x32; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,16]); } -:VPMOVQB m16^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x32; XmmReg1 ... & m16 +:VPMOVQB m16 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x32; XmmReg1 ...
& m16 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m16 = vpmovqb_avx512vl( XmmReg1 ); + XmmResult = vpmovqb_avx512vl( XmmReg1 ); + XmmMask = zext(m16); + build XmmOpMask8; + m16 = XmmResult[0,16]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114674 define pcodeop vpmovsqb_avx512vl ; -:VPMOVSQB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x22; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x22; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,16]); } -:VPMOVSQB m16^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x22; XmmReg1 ... & m16 +:VPMOVSQB m16 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x22; XmmReg1 ... 
& m16 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m16 = vpmovsqb_avx512vl( XmmReg1 ); + XmmResult = vpmovsqb_avx512vl( XmmReg1 ); + XmmMask = zext(m16); + build XmmOpMask8; + m16 = XmmResult[0,16]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114678 define pcodeop vpmovusqb_avx512vl ; -:VPMOVUSQB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x12; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x12; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,16]); } -:VPMOVUSQB m16^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x12; XmmReg1 ... & m16 +:VPMOVUSQB m16 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x12; XmmReg1 ...
& m16 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m16 = vpmovusqb_avx512vl( XmmReg1 ); + XmmResult = vpmovusqb_avx512vl( XmmReg1 ); + XmmMask = zext(m16); + build XmmOpMask8; + m16 = XmmResult[0,16]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114682 -:VPMOVQB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x32; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x32; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVQB m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x32; YmmReg1 ... & m32 +:VPMOVQB m32 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x32; YmmReg1 ...
& m32 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m32 = vpmovqb_avx512vl( YmmReg1 ); + XmmResult = vpmovqb_avx512vl( YmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask8; + m32 = XmmResult[0,32]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114685 -:VPMOVSQB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x22; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x22; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVSQB m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x22; YmmReg1 ... & m32 +:VPMOVSQB m32 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x22; YmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m32 = vpmovsqb_avx512vl( YmmReg1 ); + XmmResult = vpmovsqb_avx512vl( YmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask8; + m32 = XmmResult[0,32]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114689 -:VPMOVUSQB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x12; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x12; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVUSQB m32^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x12; YmmReg1 ... & m32 +:VPMOVUSQB m32 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x12; YmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m32 = vpmovusqb_avx512vl( YmmReg1 ); + XmmResult = vpmovusqb_avx512vl( YmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask8; + m32 = XmmResult[0,32]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114693 define pcodeop vpmovqb_avx512f ; -:VPMOVQB XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x32; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQB XmmReg2 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x32; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqb_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqb_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVQB m64^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x32; ZmmReg1 ... & m64 +:VPMOVQB m64 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x32; ZmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m64 = vpmovqb_avx512f( ZmmReg1 ); + XmmResult = vpmovqb_avx512f( ZmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114696 define pcodeop vpmovsqb_avx512f ; -:VPMOVSQB XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x22; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQB XmmReg2 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x22; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqb_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqb_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVSQB m64^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x22; ZmmReg1 ... & m64 +:VPMOVSQB m64 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x22; ZmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m64 = vpmovsqb_avx512f( ZmmReg1 ); + XmmResult = vpmovsqb_avx512f( ZmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVQB/VPMOVSQB/VPMOVUSQB 5-406 PAGE 2230 LINE 114700 define pcodeop vpmovusqb_avx512f ; -:VPMOVUSQB XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x12; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQB XmmReg2 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x12; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqb_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqb_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVUSQB m64^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x12; ZmmReg1 ... & m64 +:VPMOVUSQB m64 XmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x12; ZmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 11; ] # (TupleType OVM) { - m64 = vpmovusqb_avx512f( ZmmReg1 ); + XmmResult = vpmovusqb_avx512f( ZmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114887 define pcodeop vpmovqw_avx512vl ; -:VPMOVQW XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x34; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQW XmmReg2 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x34; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqw_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqw_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVQW m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x34; XmmReg1 ... & m32 +:VPMOVQW m32 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x34; XmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m32 = vpmovqw_avx512vl( XmmReg1 ); + XmmResult = vpmovqw_avx512vl( XmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask16; + m32 = XmmResult[0,32]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114890 define pcodeop vpmovsqw_avx512vl ; -:VPMOVSQW XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x24; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQW XmmReg2 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x24; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqw_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqw_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVSQW m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x24; XmmReg1 ... & m32 +:VPMOVSQW m32 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x24; XmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m32 = vpmovsqw_avx512vl( XmmReg1 ); + XmmResult = vpmovsqw_avx512vl( XmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask16; + m32 = XmmResult[0,32]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114894 define pcodeop vpmovusqw_avx512vl ; -:VPMOVUSQW XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x14; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQW XmmReg2 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x14; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqw_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqw_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,32]); } -:VPMOVUSQW m32^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x14; XmmReg1 ... & m32 +:VPMOVUSQW m32 XmmOpMask16, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x14; XmmReg1 ... 
& m32 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m32 = vpmovusqw_avx512vl( XmmReg1 ); + XmmResult = vpmovusqw_avx512vl( XmmReg1 ); + XmmMask = zext(m32); + build XmmOpMask16; + m32 = XmmResult[0,32]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114898 -:VPMOVQW XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x34; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQW XmmReg2 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x34; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqw_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqw_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVQW m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x34; YmmReg1 ... & m64 +:VPMOVQW m64 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x34; YmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m64 = vpmovqw_avx512vl( YmmReg1 ); + XmmResult = vpmovqw_avx512vl( YmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask16; + m64 = XmmResult[0,64]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114901 -:VPMOVSQW XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x24; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQW XmmReg2 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x24; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqw_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqw_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVSQW m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x24; YmmReg1 ... & m64 +:VPMOVSQW m64 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x24; YmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m64 = vpmovsqw_avx512vl( YmmReg1 ); + XmmResult = vpmovsqw_avx512vl( YmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask16; + m64 = XmmResult[0,64]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114905 -:VPMOVUSQW XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x14; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQW XmmReg2 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x14; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqw_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqw_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVUSQW m64^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x14; YmmReg1 ... & m64 +:VPMOVUSQW m64 XmmOpMask16, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x14; YmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m64 = vpmovusqw_avx512vl( YmmReg1 ); + XmmResult = vpmovusqw_avx512vl( YmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask16; + m64 = XmmResult[0,64]; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114909 define pcodeop vpmovqw_avx512f ; -:VPMOVQW XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x34; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQW XmmReg2 XmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x34; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqw_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqw_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult); } -:VPMOVQW m128^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x34; ZmmReg1 ... & m128 +:VPMOVQW m128 XmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x34; ZmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m128 = vpmovqw_avx512f( ZmmReg1 ); + XmmResult = vpmovqw_avx512f( ZmmReg1 ); + XmmMask = m128; + build XmmOpMask16; + m128 = XmmResult; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114912 define pcodeop vpmovsqw_avx512f ; -:VPMOVSQW XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x24; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQW XmmReg2 XmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x24; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqw_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqw_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult); } -:VPMOVSQW m128^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x24; ZmmReg1 ... & m128 +:VPMOVSQW m128 XmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x24; ZmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m128 = vpmovsqw_avx512f( ZmmReg1 ); + XmmResult = vpmovsqw_avx512f( ZmmReg1 ); + XmmMask = m128; + build XmmOpMask16; + m128 = XmmResult; } # VPMOVQW/VPMOVSQW/VPMOVUSQW 5-410 PAGE 2234 LINE 114916 define pcodeop vpmovusqw_avx512f ; -:VPMOVUSQW XmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x14; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQW XmmReg2 XmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x14; mod=3 & ZmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqw_avx512f( ZmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqw_avx512f( ZmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask16; + ZmmReg2 = zext(XmmResult); } -:VPMOVUSQW m128^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x14; ZmmReg1 ... & m128 +:VPMOVUSQW m128 XmmOpMask16, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask16; byte=0x14; ZmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 10; ] # (TupleType QVM) { - m128 = vpmovusqw_avx512f( ZmmReg1 ); + XmmResult = vpmovusqw_avx512f( ZmmReg1 ); + XmmMask = m128; + build XmmOpMask16; + m128 = XmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115104 define pcodeop vpmovqd_avx512vl ; -:VPMOVQD XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQD XmmReg2 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x35; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqd_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqd_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); } -:VPMOVQD m128^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; XmmReg1 ... & m128 +:VPMOVQD m128 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x35; XmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovqd_avx512vl( XmmReg1 ); + XmmResult = vpmovqd_avx512vl( XmmReg1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115108 define pcodeop vpmovsqd_avx512vl ; -:VPMOVSQD XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQD XmmReg2 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x25; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqd_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqd_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVSQD m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; XmmReg1 ... & m64 +:VPMOVSQD m64 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x25; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovsqd_avx512vl( XmmReg1 ); + XmmResult = vpmovsqd_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask32; + m64 = XmmResult[0,64]; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115113 define pcodeop vpmovusqd_avx512vl ; -:VPMOVUSQD XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x15; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQD XmmReg2 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x15; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqd_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqd_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVUSQD m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x15; XmmReg1 ... & m64 +:VPMOVUSQD m64 XmmOpMask32, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x15; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovusqd_avx512vl( XmmReg1 ); + XmmResult = vpmovusqd_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask32; + m64 = XmmResult[0,64]; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115118 -:VPMOVQD XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVQD XmmReg2 XmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x35; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovqd_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovqd_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); } -:VPMOVQD m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; YmmReg1 ... & m128 +:VPMOVQD m128 XmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x35; YmmReg1 ... & m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovqd_avx512vl( YmmReg1 ); + XmmResult = vpmovqd_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115122 -:VPMOVSQD XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSQD XmmReg2 XmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x25; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovsqd_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovsqd_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); } -:VPMOVSQD m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; YmmReg1 ... 
& m128 +:VPMOVSQD m128 XmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x25; YmmReg1 ... & m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovsqd_avx512vl( YmmReg1 ); + XmmResult = vpmovsqd_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115127 -:VPMOVUSQD XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x15; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSQD XmmReg2 XmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x15; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovusqd_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovusqd_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask32; + ZmmReg2 = zext(XmmResult); } -:VPMOVUSQD m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x15; YmmReg1 ... & m128 +:VPMOVUSQD m128 XmmOpMask32, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask32; byte=0x15; YmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovusqd_avx512vl( YmmReg1 ); + XmmResult = vpmovusqd_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask32; + m128 = XmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115131 define pcodeop vpmovqd_avx512f ; -:VPMOVQD YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVQD YmmReg2 YmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x35; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovqd_avx512f( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovqd_avx512f( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); } -:VPMOVQD m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x35; ZmmReg1 ... & m256 +:VPMOVQD m256 YmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x35; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovqd_avx512f( ZmmReg1 ); + YmmResult = vpmovqd_avx512f( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115134 define pcodeop vpmovsqd_avx512f ; -:VPMOVSQD YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVSQD YmmReg2 YmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x25; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovsqd_avx512f( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovsqd_avx512f( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); } -:VPMOVSQD m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x25; ZmmReg1 ... & m256 +:VPMOVSQD m256 YmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x25; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovsqd_avx512f( ZmmReg1 ); + YmmResult = vpmovsqd_avx512f( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # VPMOVQD/VPMOVSQD/VPMOVUSQD 5-414 PAGE 2238 LINE 115138 define pcodeop vpmovusqd_avx512f ; -:VPMOVUSQD YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x15; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVUSQD YmmReg2 YmmOpMask32, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask32; byte=0x15; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovusqd_avx512f( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovusqd_avx512f( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask32; + ZmmReg2 = zext(YmmResult); } -:VPMOVUSQD m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x15; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovusqd_avx512f( ZmmReg1 ); + YmmResult = vpmovusqd_avx512f( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask32; + m256 = YmmResult; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115748 define pcodeop vpmovwb_avx512vl ; -:VPMOVWB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x30; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVWB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x30; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovwb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovwb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVWB m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x30; XmmReg1 ... & m64 +:VPMOVWB m64 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x30; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovwb_avx512vl( XmmReg1 ); + XmmResult = vpmovwb_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115751 define pcodeop vpmovswb_avx512vl ; -:VPMOVSWB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x20; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSWB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x20; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovswb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovswb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVSWB m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x20; XmmReg1 ... & m64 +:VPMOVSWB m64 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x20; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovswb_avx512vl( XmmReg1 ); + XmmResult = vpmovswb_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115754 define pcodeop vpmovuswb_avx512vl ; -:VPMOVUSWB XmmReg2^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x10; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSWB XmmReg2 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x10; mod=3 & XmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovuswb_avx512vl( XmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovuswb_avx512vl( XmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult[0,64]); } -:VPMOVUSWB m64^KWriteMask, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x10; XmmReg1 ... & m64 +:VPMOVUSWB m64 XmmOpMask8, XmmReg1 is $(EVEX_NONE) & $(VEX_L128) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x10; XmmReg1 ... 
& m64 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m64 = vpmovuswb_avx512vl( XmmReg1 ); + XmmResult = vpmovuswb_avx512vl( XmmReg1 ); + XmmMask = zext(m64); + build XmmOpMask8; + m64 = XmmResult[0,64]; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115757 -:VPMOVWB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x30; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVWB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x30; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovwb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovwb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult); + } -:VPMOVWB m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x30; YmmReg1 ... & m128 +:VPMOVWB m128 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x30; YmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovwb_avx512vl( YmmReg1 ); + XmmResult = vpmovwb_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask8; + m128 = XmmResult; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115760 -:VPMOVSWB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x20; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVSWB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x20; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovswb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovswb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult); } -:VPMOVSWB m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x20; YmmReg1 ... & m128 +:VPMOVSWB m128 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x20; YmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovswb_avx512vl( YmmReg1 ); + XmmResult = vpmovswb_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask8; + m128 = XmmResult; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115763 -:VPMOVUSWB XmmReg2^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x10; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 +:VPMOVUSWB XmmReg2 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x10; mod=3 & YmmReg1 & XmmReg2 & ZmmReg2 { - XmmReg2 = vpmovuswb_avx512vl( YmmReg1 ); - ZmmReg2 = zext(XmmReg2); + XmmResult = vpmovuswb_avx512vl( YmmReg1 ); + XmmMask = XmmReg2; + build XmmOpMask8; + ZmmReg2 = zext(XmmResult); } -:VPMOVUSWB m128^KWriteMask, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x10; YmmReg1 ... & m128 +:VPMOVUSWB m128 XmmOpMask8, YmmReg1 is $(EVEX_NONE) & $(VEX_L256) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & XmmOpMask8; byte=0x10; YmmReg1 ... 
& m128 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m128 = vpmovuswb_avx512vl( YmmReg1 ); + XmmResult = vpmovuswb_avx512vl( YmmReg1 ); + XmmMask = m128; + build XmmOpMask8; + m128 = XmmResult; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115766 define pcodeop vpmovwb_avx512bw ; -:VPMOVWB YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x30; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVWB YmmReg2 YmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask8; byte=0x30; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovwb_avx512bw( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovwb_avx512bw( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask8; + ZmmReg2 = zext(YmmResult); } -:VPMOVWB m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x30; ZmmReg1 ... & m256 +:VPMOVWB m256 YmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask8; byte=0x30; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovwb_avx512bw( ZmmReg1 ); + YmmResult = vpmovwb_avx512bw( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask8; + m256 = YmmResult; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115769 define pcodeop vpmovswb_avx512bw ; -:VPMOVSWB YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x20; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVSWB YmmReg2 YmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask8; byte=0x20; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovswb_avx512bw( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovswb_avx512bw( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask8; + ZmmReg2 = zext(YmmResult); } -:VPMOVSWB m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x20; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovswb_avx512bw( ZmmReg1 ); + YmmResult = vpmovswb_avx512bw( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask8; + m256 = YmmResult; } # VPMOVWB/VPMOVSWB/VPMOVUSWB 5-426 PAGE 2250 LINE 115772 define pcodeop vpmovuswb_avx512bw ; -:VPMOVUSWB YmmReg2^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x10; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 +:VPMOVUSWB YmmReg2 YmmOpMask8, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & YmmOpMask8; byte=0x10; mod=3 & ZmmReg1 & YmmReg2 & ZmmReg2 { - YmmReg2 = vpmovuswb_avx512bw( ZmmReg1 ); - ZmmReg2 = zext(YmmReg2); + YmmResult = vpmovuswb_avx512bw( ZmmReg1 ); + YmmMask = YmmReg2; + build YmmOpMask8; + ZmmReg2 = zext(YmmResult); } -:VPMOVUSWB m256^KWriteMask, ZmmReg1 is $(EVEX_NONE) & $(EVEX_L512) & $(VEX_PRE_F3) & $(VEX_0F38) & $(VEX_W0) & KWriteMask; byte=0x10; ZmmReg1 ... 
& m256 [ evexD8Type = 1; evexTType = 9; ] # (TupleType HVM) { - m256 = vpmovuswb_avx512bw( ZmmReg1 ); + YmmResult = vpmovuswb_avx512bw( ZmmReg1 ); + YmmMask = m256; + build YmmOpMask8; + m256 = YmmResult; } \ No newline at end of file diff --git a/Ghidra/Processors/x86/data/languages/ia.sinc b/Ghidra/Processors/x86/data/languages/ia.sinc index f7a29889a9..3762962541 100644 --- a/Ghidra/Processors/x86/data/languages/ia.sinc +++ b/Ghidra/Processors/x86/data/languages/ia.sinc @@ -378,6 +378,11 @@ define register offset=2200 size=16 [ BCST16 ]; define register offset=2200 size=32 [ BCST32 ]; define register offset=2200 size=64 [ BCST64 ]; +define register offset=2300 size=16 [ XmmResult _ _ _ XmmMask ]; +define register offset=2300 size=32 [ YmmResult _ YmmMask ]; +define register offset=2300 size=64 [ ZmmResult ZmmMask ]; + + # # # This context layout is important: the 32 bit version sees addrsize as just the @@ -781,6 +786,8 @@ define pcodeop vmxon; # Enter VMX operation; opcode f3 0f C7 /6 @define BYTE_80_82 "(byte=0x80 | byte=0x82)" @endif +@include "macros.sinc" + @ifdef IA64 Reg8: reg8 is rexprefix=0 & reg8 { export reg8; } Reg8: reg8_x0 is rexprefix=1 & rexRprefix=0 & reg8_x0 { export reg8_x0; } @@ -859,6 +866,7 @@ ZmmReg1: zmmreg1 is zmmreg1 { export zmmreg1; } ZmmReg2: zmmreg2 is zmmreg2 { export zmmreg2; } Xmm_vsib: xmm_vsib is xmm_vsib { export xmm_vsib; } Ymm_vsib: ymm_vsib is ymm_vsib { export ymm_vsib; } +Zmm_vsib: zmm_vsib is zmm_vsib { export zmm_vsib; } @endif # signed immediate value subconstructors @@ -1122,6 +1130,11 @@ vaddr32y: [Ymm_vsib*ss + simm32_32] is mod=0 & r_m=4; Ymm_vsib & base=5 & ss; vaddr32y: [Base + Ymm_vsib*ss + simm8_32] is mod=1 & r_m=4; Ymm_vsib & Base & ss; simm8_32 { } vaddr32y: [Base + Ymm_vsib*ss + simm32_32] is mod=2 & r_m=4; Ymm_vsib & Base & ss; simm32_32 { } +vaddr32z: [Base + Zmm_vsib*ss] is mod=0 & r_m=4; Zmm_vsib & Base & ss { } +vaddr32z: [Zmm_vsib*ss + simm32_32] is mod=0 & r_m=4; Zmm_vsib & base=5 & ss; 
simm32_32 { } +vaddr32z: [Base + Zmm_vsib*ss + simm8_32] is mod=1 & r_m=4; Zmm_vsib & Base & ss; simm8_32 { } +vaddr32z: [Base + Zmm_vsib*ss + simm32_32] is mod=2 & r_m=4; Zmm_vsib & Base & ss; simm32_32 { } + @ifdef IA64 vaddr64x: [Base64 + Xmm_vsib*ss] is mod=0 & r_m=4; Xmm_vsib & Base64 & ss { } vaddr64x: [Xmm_vsib*ss + simm32_64] is mod=0 & r_m=4; Xmm_vsib & base64=5 & ss; simm32_64 { } @@ -1132,6 +1145,11 @@ vaddr64y: [Base64 + Ymm_vsib*ss] is mod=0 & r_m=4; Ymm_vsib & Base64 & ss vaddr64y: [Ymm_vsib*ss + simm32_64] is mod=0 & r_m=4; Ymm_vsib & base64=5 & ss; simm32_64 { } vaddr64y: [Base64 + Ymm_vsib*ss + simm8_64] is mod=1 & r_m=4; Ymm_vsib & Base64 & ss; simm8_64 { } vaddr64y: [Base64 + Ymm_vsib*ss + simm32_64] is mod=2 & r_m=4; Ymm_vsib & Base64 & ss; simm32_64 { } + +vaddr64z: [Base64 + Zmm_vsib*ss] is mod=0 & r_m=4; Zmm_vsib & Base64 & ss { } +vaddr64z: [Zmm_vsib*ss + simm32_64] is mod=0 & r_m=4; Zmm_vsib & base64=5 & ss; simm32_64 { } +vaddr64z: [Base64 + Zmm_vsib*ss + simm8_64] is mod=1 & r_m=4; Zmm_vsib & Base64 & ss; simm8_64 { } +vaddr64z: [Base64 + Zmm_vsib*ss + simm32_64] is mod=2 & r_m=4; Zmm_vsib & Base64 & ss; simm32_64 { } @endif @@ -1141,6 +1159,9 @@ vMem32x: segWide^vaddr32x is addrsize=1 & segWide & highseg=1; vaddr32x { } vMem32y: segWide^vaddr32y is addrsize=1 & segWide; vaddr32y { } vMem32y: segWide^vaddr32y is addrsize=1 & segWide & highseg=1; vaddr32y { } +vMem32z: segWide^vaddr32z is addrsize=1 & segWide; vaddr32z { } +vMem32z: segWide^vaddr32z is addrsize=1 & segWide & highseg=1; vaddr32z { } + @ifdef IA64 # GAS always inserts a 0x67 prefix before a VSIB instruction with a 32-bit base. # Behavior is coded to match Binutils; exceeds what the manual indicates is possible. 
@@ -1152,6 +1173,11 @@ vMem32x: segWide^vaddr64x is addrsize=2 & segWide & highseg=1; vaddr64x { } vMem32y: segWide^vaddr64y is addrsize=2 & segWide; vaddr64y { } vMem32y: segWide^vaddr64y is addrsize=2 & segWide & highseg=1; vaddr64y { } +# GAS always inserts a 0x67 prefix before a VSIB instruction with a 32-bit base. +# Behavior is coded to match Binutils; exceeds what the manual indicates is possible. +vMem32z: segWide^vaddr64z is addrsize=2 & segWide; vaddr64z { } +vMem32z: segWide^vaddr64z is addrsize=2 & segWide & highseg=1; vaddr64z { } + # GAS always inserts a 0x67 prefix before a VSIB instruction with a 32-bit base. # Behavior is coded to match Binutils; exceeds what the manual indicates is possible. vMem64x: segWide^vaddr32x is addrsize=1 & segWide; vaddr32x { } @@ -1167,24 +1193,46 @@ vMem64y: segWide^vaddr32y is addrsize=1 & segWide & highseg=1; vaddr32y { } vMem64y: segWide^vaddr64y is addrsize=2 & segWide; vaddr64y { } vMem64y: segWide^vaddr64y is addrsize=2 & segWide & highseg=1; vaddr64y { } + +# GAS always inserts a 0x67 prefix before a VSIB instruction with a 32-bit base. +# Behavior is coded to match Binutils; exceeds what the manual indicates is possible. 
+vMem64z: segWide^vaddr32z is addrsize=1 & segWide; vaddr32z { } +vMem64z: segWide^vaddr32z is addrsize=1 & segWide & highseg=1; vaddr32z { } + +vMem64z: segWide^vaddr64z is addrsize=2 & segWide; vaddr64z { } +vMem64z: segWide^vaddr64z is addrsize=2 & segWide & highseg=1; vaddr64z { } @endif d_vm32x: "dword ptr "^vMem32x is vMem32x { } d_vm32y: "dword ptr "^vMem32y is vMem32y { } +# not used d_vm32z: "dword ptr "^vMem32z is vMem32z { } @ifdef IA64 d_vm64x: "dword ptr "^vMem64x is vMem64x { } d_vm64y: "dword ptr "^vMem64y is vMem64y { } +# not used d_vm64z: "dword ptr "^vMem64z is vMem64z { } @endif q_vm32x: "qword ptr "^vMem32x is vMem32x { } # not used q_vm32y: "qword ptr "^vMem32y is vMem32y { } +# not used q_vm32z: "qword ptr "^vMem32z is vMem32z { } @ifdef IA64 q_vm64x: "qword ptr "^vMem64x is vMem64x { } q_vm64y: "qword ptr "^vMem64y is vMem64y { } +q_vm64z: "qword ptr "^vMem64z is vMem64z { } +@endif + +x_vm32x: "xmmword ptr "^vMem32x is vMem32x { export vMem32x; } +y_vm32y: "ymmword ptr "^vMem32y is vMem32y { export vMem32y; } +z_vm32z: "zmmword ptr "^vMem32z is vMem32z { export vMem32z; } + +@ifdef IA64 +x_vm64x: "xmmword ptr "^vMem64x is vMem64x { } +y_vm64y: "ymmword ptr "^vMem64y is vMem64y { } +z_vm64z: "zmmword ptr "^vMem64z is vMem64z { } @endif Reg32_m8: Rmr32 is mod=3 & Rmr32 { export Rmr32; } @@ -1454,25 +1502,339 @@ unlock: is epsilon { } KReg_reg: opmaskreg is opmaskreg { export opmaskreg; } KReg_rm: opmaskrm is opmaskrm { export opmaskrm; } -vexVVVV_KReg: evexVopmask is evexVopmask { export evexVopmask; } +# not used vexVVVV_KReg: evexVopmask is evexVopmask { export evexVopmask; } vex1VVV_KReg: evexVopmask is evexVopmask & vexHighV=1 { export evexVopmask; } -KWriteMask: "{"^evexOpmask^"}" is evexOpmask { export evexOpmask; } -KWriteMask: is evexOpmask=0 { local tmp:8 = 0; export tmp; } +XmmMaskMode: is evexZ=0 { } +XmmMaskMode: "{z}" is evexZ=1 { XmmMask=0; } -RegK_m8: KReg_rm is mod=3 & KReg_rm { export KReg_rm; } -RegK_m8: m8 is m8 { tmp:8 = 
zext(m8); export tmp; } -RegK_m16: KReg_rm is mod=3 & KReg_rm { export KReg_rm; } -RegK_m16: m16 is m16 { tmp:8 = zext(m16); export tmp; } -RegK_m32: KReg_rm is mod=3 & KReg_rm { export KReg_rm; } -RegK_m32: m32 is m32 { tmp:8 = zext(m32); export tmp; } +YmmMaskMode: is evexZ=0 { } +YmmMaskMode: "{z}" is evexZ=1 { YmmMask=0; } + +ZmmMaskMode: is evexZ=0 { } +ZmmMaskMode: "{z}" is evexZ=1 { ZmmMask=0; } + +AVXOpMask: "{"^evexOpmask^"}" is evexOpmask { export evexOpmask; } + +# Z=0: merge masking +# Z=1: zero masking +XmmOpMask: AVXOpMask^XmmMaskMode is AVXOpMask & XmmMaskMode { + export AVXOpMask; +} + +XmmOpMask8: AVXOpMask^XmmMaskMode is AVXOpMask & XmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(XmmResult[0,8], mask[0,1], XmmResult[0,8], XmmMask[0,8]); + conditionalAssign(XmmResult[8,8], mask[1,1],XmmResult[8,8], XmmMask[8,8]); + conditionalAssign(XmmResult[16,8], mask[2,1], XmmResult[16,8], XmmMask[16,8]); + conditionalAssign(XmmResult[24,8], mask[3,1], XmmResult[24,8], XmmMask[24,8]); + conditionalAssign(XmmResult[32,8], mask[4,1], XmmResult[32,8], XmmMask[32,8]); + conditionalAssign(XmmResult[40,8], mask[5,1], XmmResult[40,8], XmmMask[40,8]); + conditionalAssign(XmmResult[48,8], mask[6,1], XmmResult[48,8], XmmMask[48,8]); + conditionalAssign(XmmResult[56,8], mask[7,1], XmmResult[56,8], XmmMask[56,8]); + conditionalAssign(XmmResult[64,8], mask[8,1], XmmResult[64,8], XmmMask[64,8]); + conditionalAssign(XmmResult[72,8], mask[9,1], XmmResult[72,8], XmmMask[72,8]); + conditionalAssign(XmmResult[80,8], mask[10,1], XmmResult[80,8], XmmMask[80,8]); + conditionalAssign(XmmResult[88,8], mask[11,1], XmmResult[88,8], XmmMask[88,8]); + conditionalAssign(XmmResult[96,8], mask[12,1], XmmResult[96,8], XmmMask[96,8]); + conditionalAssign(XmmResult[104,8], mask[13,1], XmmResult[104,8], XmmMask[104,8]); + conditionalAssign(XmmResult[112,8], mask[14,1], XmmResult[112,8], XmmMask[112,8]); + conditionalAssign(XmmResult[120,8], mask[15,1], XmmResult[120,8], 
XmmMask[120,8]); +} + +XmmOpMask8: is evexOpmask=0 { +} + +XmmOpMask16: AVXOpMask^XmmMaskMode is AVXOpMask & XmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(XmmResult[0,16], mask[0,1], XmmResult[0,16], XmmMask[0,16]); + conditionalAssign(XmmResult[16,16], mask[1,1], XmmResult[16,16], XmmMask[16,16]); + conditionalAssign(XmmResult[32,16], mask[2,1], XmmResult[32,16], XmmMask[32,16]); + conditionalAssign(XmmResult[48,16], mask[3,1], XmmResult[48,16], XmmMask[48,16]); + conditionalAssign(XmmResult[64,16], mask[4,1], XmmResult[64,16], XmmMask[64,16]); + conditionalAssign(XmmResult[80,16], mask[5,1], XmmResult[80,16], XmmMask[80,16]); + conditionalAssign(XmmResult[96,16], mask[6,1], XmmResult[96,16], XmmMask[96,16]); + conditionalAssign(XmmResult[112,16], mask[7,1], XmmResult[112,16], XmmMask[112,16]); +} + +XmmOpMask16: is evexOpmask=0 { +} + + +XmmOpMask32: AVXOpMask^XmmMaskMode is AVXOpMask & XmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(XmmResult[0,32], mask[0,1], XmmResult[0,32], XmmMask[0,32]); + conditionalAssign(XmmResult[32,32], mask[1,1], XmmResult[32,32], XmmMask[32,32]); + conditionalAssign(XmmResult[64,32], mask[2,1], XmmResult[64,32], XmmMask[64,32]); + conditionalAssign(XmmResult[96,32], mask[3,1], XmmResult[96,32], XmmMask[96,32]); +} + +XmmOpMask32: is evexOpmask=0 { +} + +XmmOpMask64: AVXOpMask^XmmMaskMode is AVXOpMask & XmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(XmmResult[0,64], mask[0,1], XmmResult[0,64], XmmMask[0,64]); + conditionalAssign(XmmResult[64,64], mask[1,1], XmmResult[64,64], XmmMask[64,64]); +} + +XmmOpMask64: is evexOpmask=0 { +} + +YmmOpMask: AVXOpMask^YmmMaskMode is AVXOpMask & YmmMaskMode { + export AVXOpMask; +} + +YmmOpMask8: AVXOpMask^YmmMaskMode is AVXOpMask & YmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(YmmResult[0,8], mask[0,1], YmmResult[0,8], YmmMask[0,8]); + conditionalAssign(YmmResult[8,8], mask[1,1], YmmResult[8,8], YmmMask[8,8]); + conditionalAssign(YmmResult[16,8], 
mask[2,1], YmmResult[16,8], YmmMask[16,8]); + conditionalAssign(YmmResult[24,8], mask[3,1], YmmResult[24,8], YmmMask[24,8]); + conditionalAssign(YmmResult[32,8], mask[4,1], YmmResult[32,8], YmmMask[32,8]); + conditionalAssign(YmmResult[40,8], mask[5,1], YmmResult[40,8], YmmMask[40,8]); + conditionalAssign(YmmResult[48,8], mask[6,1], YmmResult[48,8], YmmMask[48,8]); + conditionalAssign(YmmResult[56,8], mask[7,1], YmmResult[56,8], YmmMask[56,8]); + conditionalAssign(YmmResult[64,8], mask[8,1], YmmResult[64,8], YmmMask[64,8]); + conditionalAssign(YmmResult[72,8], mask[9,1], YmmResult[72,8], YmmMask[72,8]); + conditionalAssign(YmmResult[80,8], mask[10,1], YmmResult[80,8], YmmMask[80,8]); + conditionalAssign(YmmResult[88,8], mask[11,1], YmmResult[88,8], YmmMask[88,8]); + conditionalAssign(YmmResult[96,8], mask[12,1], YmmResult[96,8], YmmMask[96,8]); + conditionalAssign(YmmResult[104,8], mask[13,1], YmmResult[104,8], YmmMask[104,8]); + conditionalAssign(YmmResult[112,8], mask[14,1], YmmResult[112,8], YmmMask[112,8]); + conditionalAssign(YmmResult[120,8], mask[15,1], YmmResult[120,8], YmmMask[120,8]); + conditionalAssign(YmmResult[128,8], mask[16,1], YmmResult[128,8], YmmMask[128,8]); + conditionalAssign(YmmResult[136,8], mask[17,1], YmmResult[136,8], YmmMask[136,8]); + conditionalAssign(YmmResult[144,8], mask[18,1], YmmResult[144,8], YmmMask[144,8]); + conditionalAssign(YmmResult[152,8], mask[19,1], YmmResult[152,8], YmmMask[152,8]); + conditionalAssign(YmmResult[160,8], mask[20,1], YmmResult[160,8], YmmMask[160,8]); + conditionalAssign(YmmResult[168,8], mask[21,1], YmmResult[168,8], YmmMask[168,8]); + conditionalAssign(YmmResult[176,8], mask[22,1], YmmResult[176,8], YmmMask[176,8]); + conditionalAssign(YmmResult[184,8], mask[23,1], YmmResult[184,8], YmmMask[184,8]); + conditionalAssign(YmmResult[192,8], mask[24,1], YmmResult[192,8], YmmMask[192,8]); + conditionalAssign(YmmResult[200,8], mask[25,1], YmmResult[200,8], YmmMask[200,8]); + conditionalAssign(YmmResult[208,8], 
mask[26,1], YmmResult[208,8], YmmMask[208,8]); + conditionalAssign(YmmResult[216,8], mask[27,1], YmmResult[216,8], YmmMask[216,8]); + conditionalAssign(YmmResult[224,8], mask[28,1], YmmResult[224,8], YmmMask[224,8]); + conditionalAssign(YmmResult[232,8], mask[29,1], YmmResult[232,8], YmmMask[232,8]); + conditionalAssign(YmmResult[240,8], mask[30,1], YmmResult[240,8], YmmMask[240,8]); + conditionalAssign(YmmResult[248,8], mask[31,1], YmmResult[248,8], YmmMask[248,8]); +} + +YmmOpMask8: is evexOpmask=0 { +} + +YmmOpMask16: AVXOpMask^YmmMaskMode is AVXOpMask & YmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(YmmResult[0,16], mask[0,1], YmmResult[0,16], YmmMask[0,16]); + conditionalAssign(YmmResult[16,16], mask[1,1], YmmResult[16,16], YmmMask[16,16]); + conditionalAssign(YmmResult[32,16], mask[2,1], YmmResult[32,16], YmmMask[32,16]); + conditionalAssign(YmmResult[48,16], mask[3,1], YmmResult[48,16], YmmMask[48,16]); + conditionalAssign(YmmResult[64,16], mask[4,1], YmmResult[64,16], YmmMask[64,16]); + conditionalAssign(YmmResult[80,16], mask[5,1], YmmResult[80,16], YmmMask[80,16]); + conditionalAssign(YmmResult[96,16], mask[6,1], YmmResult[96,16], YmmMask[96,16]); + conditionalAssign(YmmResult[112,16], mask[7,1], YmmResult[112,16], YmmMask[112,16]); + conditionalAssign(YmmResult[128,16], mask[8,1], YmmResult[128,16], YmmMask[128,16]); + conditionalAssign(YmmResult[144,16], mask[9,1], YmmResult[144,16], YmmMask[144,16]); + conditionalAssign(YmmResult[160,16], mask[10,1], YmmResult[160,16], YmmMask[160,16]); + conditionalAssign(YmmResult[176,16], mask[11,1], YmmResult[176,16], YmmMask[176,16]); + conditionalAssign(YmmResult[192,16], mask[12,1], YmmResult[192,16], YmmMask[192,16]); + conditionalAssign(YmmResult[208,16], mask[13,1], YmmResult[208,16], YmmMask[208,16]); + conditionalAssign(YmmResult[224,16], mask[14,1], YmmResult[224,16], YmmMask[224,16]); + conditionalAssign(YmmResult[240,16], mask[15,1], YmmResult[240,16], YmmMask[240,16]); +} + +YmmOpMask16: is evexOpmask=0 { +} + +YmmOpMask32: AVXOpMask^YmmMaskMode is AVXOpMask & YmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(YmmResult[0,32], mask[0,1], 
YmmResult[0,32], YmmMask[0,32]); + conditionalAssign(YmmResult[32,32], mask[1,1], YmmResult[32,32], YmmMask[32,32]); + conditionalAssign(YmmResult[64,32], mask[2,1], YmmResult[64,32], YmmMask[64,32]); + conditionalAssign(YmmResult[96,32], mask[3,1], YmmResult[96,32], YmmMask[96,32]); + conditionalAssign(YmmResult[128,32], mask[4,1], YmmResult[128,32], YmmMask[128,32]); + conditionalAssign(YmmResult[160,32], mask[5,1], YmmResult[160,32], YmmMask[160,32]); + conditionalAssign(YmmResult[192,32], mask[6,1], YmmResult[192,32], YmmMask[192,32]); + conditionalAssign(YmmResult[224,32], mask[7,1], YmmResult[224,32], YmmMask[224,32]); +} + +YmmOpMask32: is evexOpmask=0 { +} + +YmmOpMask64: AVXOpMask^YmmMaskMode is AVXOpMask & YmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(YmmResult[0,64], mask[0,1], YmmResult[0,64], YmmMask[0,64]); + conditionalAssign(YmmResult[64,64], mask[1,1], YmmResult[64,64], YmmMask[64,64]); + conditionalAssign(YmmResult[128,64], mask[2,1], YmmResult[128,64], YmmMask[128,64]); + conditionalAssign(YmmResult[192,64], mask[3,1], YmmResult[192,64], YmmMask[192,64]); +} + +YmmOpMask64: is evexOpmask=0 { +} + +ZmmOpMask: AVXOpMask^ZmmMaskMode is AVXOpMask & ZmmMaskMode { + export AVXOpMask; +} + +ZmmOpMask8: AVXOpMask^ZmmMaskMode is AVXOpMask & ZmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(ZmmResult[0,8], mask[0,1], ZmmResult[0,8], ZmmMask[0,8]); + conditionalAssign(ZmmResult[8,8], mask[1,1], ZmmResult[8,8], ZmmMask[8,8]); + conditionalAssign(ZmmResult[16,8], mask[2,1], ZmmResult[16,8], ZmmMask[16,8]); + conditionalAssign(ZmmResult[24,8], mask[3,1], ZmmResult[24,8], ZmmMask[24,8]); + conditionalAssign(ZmmResult[32,8], mask[4,1], ZmmResult[32,8], ZmmMask[32,8]); + conditionalAssign(ZmmResult[40,8], mask[5,1], ZmmResult[40,8], ZmmMask[40,8]); + conditionalAssign(ZmmResult[48,8], mask[6,1], ZmmResult[48,8], ZmmMask[48,8]); + conditionalAssign(ZmmResult[56,8], mask[7,1], ZmmResult[56,8], ZmmMask[56,8]); + 
conditionalAssign(ZmmResult[64,8], mask[8,1], ZmmResult[64,8], ZmmMask[64,8]); + conditionalAssign(ZmmResult[72,8], mask[9,1], ZmmResult[72,8], ZmmMask[72,8]); + conditionalAssign(ZmmResult[80,8], mask[10,1], ZmmResult[80,8], ZmmMask[80,8]); + conditionalAssign(ZmmResult[88,8], mask[11,1], ZmmResult[88,8], ZmmMask[88,8]); + conditionalAssign(ZmmResult[96,8], mask[12,1], ZmmResult[96,8], ZmmMask[96,8]); + conditionalAssign(ZmmResult[104,8], mask[13,1], ZmmResult[104,8], ZmmMask[104,8]); + conditionalAssign(ZmmResult[112,8], mask[14,1], ZmmResult[112,8], ZmmMask[112,8]); + conditionalAssign(ZmmResult[120,8], mask[15,1], ZmmResult[120,8], ZmmMask[120,8]); + conditionalAssign(ZmmResult[128,8], mask[16,1], ZmmResult[128,8], ZmmMask[128,8]); + conditionalAssign(ZmmResult[136,8], mask[17,1], ZmmResult[136,8], ZmmMask[136,8]); + conditionalAssign(ZmmResult[144,8], mask[18,1], ZmmResult[144,8], ZmmMask[144,8]); + conditionalAssign(ZmmResult[152,8], mask[19,1], ZmmResult[152,8], ZmmMask[152,8]); + conditionalAssign(ZmmResult[160,8], mask[20,1], ZmmResult[160,8], ZmmMask[160,8]); + conditionalAssign(ZmmResult[168,8], mask[21,1], ZmmResult[168,8], ZmmMask[168,8]); + conditionalAssign(ZmmResult[176,8], mask[22,1], ZmmResult[176,8], ZmmMask[176,8]); + conditionalAssign(ZmmResult[184,8], mask[23,1], ZmmResult[184,8], ZmmMask[184,8]); + conditionalAssign(ZmmResult[192,8], mask[24,1], ZmmResult[192,8], ZmmMask[192,8]); + conditionalAssign(ZmmResult[200,8], mask[25,1], ZmmResult[200,8], ZmmMask[200,8]); + conditionalAssign(ZmmResult[208,8], mask[26,1], ZmmResult[208,8], ZmmMask[208,8]); + conditionalAssign(ZmmResult[216,8], mask[27,1], ZmmResult[216,8], ZmmMask[216,8]); + conditionalAssign(ZmmResult[224,8], mask[28,1], ZmmResult[224,8], ZmmMask[224,8]); + conditionalAssign(ZmmResult[232,8], mask[29,1], ZmmResult[232,8], ZmmMask[232,8]); + conditionalAssign(ZmmResult[240,8], mask[30,1], ZmmResult[240,8], ZmmMask[240,8]); + conditionalAssign(ZmmResult[248,8], mask[31,1], 
ZmmResult[248,8], ZmmMask[248,8]); + conditionalAssign(ZmmResult[256,8], mask[32,1], ZmmResult[256,8], ZmmMask[256,8]); + conditionalAssign(ZmmResult[264,8], mask[33,1], ZmmResult[264,8], ZmmMask[264,8]); + conditionalAssign(ZmmResult[272,8], mask[34,1], ZmmResult[272,8], ZmmMask[272,8]); + conditionalAssign(ZmmResult[280,8], mask[35,1], ZmmResult[280,8], ZmmMask[280,8]); + conditionalAssign(ZmmResult[288,8], mask[36,1], ZmmResult[288,8], ZmmMask[288,8]); + conditionalAssign(ZmmResult[296,8], mask[37,1], ZmmResult[296,8], ZmmMask[296,8]); + conditionalAssign(ZmmResult[304,8], mask[38,1], ZmmResult[304,8], ZmmMask[304,8]); + conditionalAssign(ZmmResult[312,8], mask[39,1], ZmmResult[312,8], ZmmMask[312,8]); + conditionalAssign(ZmmResult[320,8], mask[40,1], ZmmResult[320,8], ZmmMask[320,8]); + conditionalAssign(ZmmResult[328,8], mask[41,1], ZmmResult[328,8], ZmmMask[328,8]); + conditionalAssign(ZmmResult[336,8], mask[42,1], ZmmResult[336,8], ZmmMask[336,8]); + conditionalAssign(ZmmResult[344,8], mask[43,1], ZmmResult[344,8], ZmmMask[344,8]); + conditionalAssign(ZmmResult[352,8], mask[44,1], ZmmResult[352,8], ZmmMask[352,8]); + conditionalAssign(ZmmResult[360,8], mask[45,1], ZmmResult[360,8], ZmmMask[360,8]); + conditionalAssign(ZmmResult[368,8], mask[46,1], ZmmResult[368,8], ZmmMask[368,8]); + conditionalAssign(ZmmResult[376,8], mask[47,1], ZmmResult[376,8], ZmmMask[376,8]); + conditionalAssign(ZmmResult[384,8], mask[48,1], ZmmResult[384,8], ZmmMask[384,8]); + conditionalAssign(ZmmResult[392,8], mask[49,1], ZmmResult[392,8], ZmmMask[392,8]); + conditionalAssign(ZmmResult[400,8], mask[50,1], ZmmResult[400,8], ZmmMask[400,8]); + conditionalAssign(ZmmResult[408,8], mask[51,1], ZmmResult[408,8], ZmmMask[408,8]); + conditionalAssign(ZmmResult[416,8], mask[52,1], ZmmResult[416,8], ZmmMask[416,8]); + conditionalAssign(ZmmResult[424,8], mask[53,1], ZmmResult[424,8], ZmmMask[424,8]); + conditionalAssign(ZmmResult[432,8], mask[54,1], ZmmResult[432,8], ZmmMask[432,8]); + 
conditionalAssign(ZmmResult[440,8], mask[55,1], ZmmResult[440,8], ZmmMask[440,8]); + conditionalAssign(ZmmResult[448,8], mask[56,1], ZmmResult[448,8], ZmmMask[448,8]); + conditionalAssign(ZmmResult[456,8], mask[57,1], ZmmResult[456,8], ZmmMask[456,8]); + conditionalAssign(ZmmResult[464,8], mask[58,1], ZmmResult[464,8], ZmmMask[464,8]); + conditionalAssign(ZmmResult[472,8], mask[59,1], ZmmResult[472,8], ZmmMask[472,8]); + conditionalAssign(ZmmResult[480,8], mask[60,1], ZmmResult[480,8], ZmmMask[480,8]); + conditionalAssign(ZmmResult[488,8], mask[61,1], ZmmResult[488,8], ZmmMask[488,8]); + conditionalAssign(ZmmResult[496,8], mask[62,1], ZmmResult[496,8], ZmmMask[496,8]); + conditionalAssign(ZmmResult[504,8], mask[63,1], ZmmResult[504,8], ZmmMask[504,8]); +} + +ZmmOpMask8: is evexOpmask=0 { +} + +ZmmOpMask16: AVXOpMask^ZmmMaskMode is AVXOpMask & ZmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(ZmmResult[0,16], mask[0,1], ZmmResult[0,16], ZmmMask[0,16]); + conditionalAssign(ZmmResult[16,16], mask[1,1], ZmmResult[16,16], ZmmMask[16,16]); + conditionalAssign(ZmmResult[32,16], mask[2,1], ZmmResult[32,16], ZmmMask[32,16]); + conditionalAssign(ZmmResult[48,16], mask[3,1], ZmmResult[48,16], ZmmMask[48,16]); + conditionalAssign(ZmmResult[64,16], mask[4,1], ZmmResult[64,16], ZmmMask[64,16]); + conditionalAssign(ZmmResult[80,16], mask[5,1], ZmmResult[80,16], ZmmMask[80,16]); + conditionalAssign(ZmmResult[96,16], mask[6,1], ZmmResult[96,16], ZmmMask[96,16]); + conditionalAssign(ZmmResult[112,16], mask[7,1], ZmmResult[112,16], ZmmMask[112,16]); + conditionalAssign(ZmmResult[128,16], mask[8,1], ZmmResult[128,16], ZmmMask[128,16]); + conditionalAssign(ZmmResult[144,16], mask[9,1], ZmmResult[144,16], ZmmMask[144,16]); + conditionalAssign(ZmmResult[160,16], mask[10,1], ZmmResult[160,16], ZmmMask[160,16]); + conditionalAssign(ZmmResult[176,16], mask[11,1], ZmmResult[176,16], ZmmMask[176,16]); + conditionalAssign(ZmmResult[192,16], mask[12,1], ZmmResult[192,16], 
ZmmMask[192,16]); + conditionalAssign(ZmmResult[208,16], mask[13,1], ZmmResult[208,16], ZmmMask[208,16]); + conditionalAssign(ZmmResult[224,16], mask[14,1], ZmmResult[224,16], ZmmMask[224,16]); + conditionalAssign(ZmmResult[240,16], mask[15,1], ZmmResult[240,16], ZmmMask[240,16]); + conditionalAssign(ZmmResult[256,16], mask[16,1], ZmmResult[256,16], ZmmMask[256,16]); + conditionalAssign(ZmmResult[272,16], mask[17,1], ZmmResult[272,16], ZmmMask[272,16]); + conditionalAssign(ZmmResult[288,16], mask[18,1], ZmmResult[288,16], ZmmMask[288,16]); + conditionalAssign(ZmmResult[304,16], mask[19,1], ZmmResult[304,16], ZmmMask[304,16]); + conditionalAssign(ZmmResult[320,16], mask[20,1], ZmmResult[320,16], ZmmMask[320,16]); + conditionalAssign(ZmmResult[336,16], mask[21,1], ZmmResult[336,16], ZmmMask[336,16]); + conditionalAssign(ZmmResult[352,16], mask[22,1], ZmmResult[352,16], ZmmMask[352,16]); + conditionalAssign(ZmmResult[368,16], mask[23,1], ZmmResult[368,16], ZmmMask[368,16]); + conditionalAssign(ZmmResult[384,16], mask[24,1], ZmmResult[384,16], ZmmMask[384,16]); + conditionalAssign(ZmmResult[400,16], mask[25,1], ZmmResult[400,16], ZmmMask[400,16]); + conditionalAssign(ZmmResult[416,16], mask[26,1], ZmmResult[416,16], ZmmMask[416,16]); + conditionalAssign(ZmmResult[432,16], mask[27,1], ZmmResult[432,16], ZmmMask[432,16]); + conditionalAssign(ZmmResult[448,16], mask[28,1], ZmmResult[448,16], ZmmMask[448,16]); + conditionalAssign(ZmmResult[464,16], mask[29,1], ZmmResult[464,16], ZmmMask[464,16]); + conditionalAssign(ZmmResult[480,16], mask[30,1], ZmmResult[480,16], ZmmMask[480,16]); + conditionalAssign(ZmmResult[496,16], mask[31,1], ZmmResult[496,16], ZmmMask[496,16]); +} + +ZmmOpMask16: is evexOpmask=0 { +} + +ZmmOpMask32: AVXOpMask^ZmmMaskMode is AVXOpMask & ZmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(ZmmResult[0,32], mask[0,1], ZmmResult[0,32], ZmmMask[0,32]); + conditionalAssign(ZmmResult[32,32], mask[1,1], ZmmResult[32,32], ZmmMask[32,32]); + 
conditionalAssign(ZmmResult[64,32], mask[2,1], ZmmResult[64,32], ZmmMask[64,32]); + conditionalAssign(ZmmResult[96,32], mask[3,1], ZmmResult[96,32], ZmmMask[96,32]); + conditionalAssign(ZmmResult[128,32], mask[4,1], ZmmResult[128,32], ZmmMask[128,32]); + conditionalAssign(ZmmResult[160,32], mask[5,1], ZmmResult[160,32], ZmmMask[160,32]); + conditionalAssign(ZmmResult[192,32], mask[6,1], ZmmResult[192,32], ZmmMask[192,32]); + conditionalAssign(ZmmResult[224,32], mask[7,1], ZmmResult[224,32], ZmmMask[224,32]); + conditionalAssign(ZmmResult[256,32], mask[8,1], ZmmResult[256,32], ZmmMask[256,32]); + conditionalAssign(ZmmResult[288,32], mask[9,1], ZmmResult[288,32], ZmmMask[288,32]); + conditionalAssign(ZmmResult[320,32], mask[10,1], ZmmResult[320,32], ZmmMask[320,32]); + conditionalAssign(ZmmResult[352,32], mask[11,1], ZmmResult[352,32], ZmmMask[352,32]); + conditionalAssign(ZmmResult[384,32], mask[12,1], ZmmResult[384,32], ZmmMask[384,32]); + conditionalAssign(ZmmResult[416,32], mask[13,1], ZmmResult[416,32], ZmmMask[416,32]); + conditionalAssign(ZmmResult[448,32], mask[14,1], ZmmResult[448,32], ZmmMask[448,32]); + conditionalAssign(ZmmResult[480,32], mask[15,1], ZmmResult[480,32], ZmmMask[480,32]); +} + +ZmmOpMask32: is evexOpmask=0 { +} + +ZmmOpMask64: AVXOpMask^ZmmMaskMode is AVXOpMask & ZmmMaskMode { + local mask = AVXOpMask; + conditionalAssign(ZmmResult[0,64], mask[0,1], ZmmResult[0,64], ZmmMask[0,64]); + conditionalAssign(ZmmResult[64,64], mask[1,1], ZmmResult[64,64], ZmmMask[64,64]); + conditionalAssign(ZmmResult[128,64], mask[2,1], ZmmResult[128,64], ZmmMask[128,64]); + conditionalAssign(ZmmResult[192,64], mask[3,1], ZmmResult[192,64], ZmmMask[192,64]); + conditionalAssign(ZmmResult[256,64], mask[4,1], ZmmResult[256,64], ZmmMask[256,64]); + conditionalAssign(ZmmResult[320,64], mask[5,1], ZmmResult[320,64], ZmmMask[320,64]); + conditionalAssign(ZmmResult[384,64], mask[6,1], ZmmResult[384,64], ZmmMask[384,64]); + conditionalAssign(ZmmResult[448,64], mask[7,1], 
ZmmResult[448,64], ZmmMask[448,64]); +} + +ZmmOpMask64: is evexOpmask=0 { +} + + +RegK_m8: KReg_rm is mod=3 & KReg_rm { tmp:1 = KReg_rm[0,8]; export tmp; } +RegK_m8: m8 is m8 { tmp:1 = m8; export tmp; } +RegK_m16: KReg_rm is mod=3 & KReg_rm { tmp:2 = KReg_rm[0,16]; export tmp;} +RegK_m16: m16 is m16 { tmp:2 = m16; export tmp; } +RegK_m32: KReg_rm is mod=3 & KReg_rm { tmp:4 = KReg_rm[0,32]; export tmp; } +RegK_m32: m32 is m32 { tmp:4 = m32; export tmp; } RegK_m64: KReg_rm is mod=3 & KReg_rm { export KReg_rm; } RegK_m64: m64 is m64 { export m64; } # Some macros -@include "macros.sinc" - macro ptr2(r,x) { r = zext(x); }