Mirror of https://github.com/NationalSecurityAgency/ghidra.git, synced 2025-10-03 09:49:23 +02:00
AA64: fix ld<n>r registers
commit 8473b248a1
parent cae9190c13
1 changed file with 270 additions and 270 deletions
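The diff below touches the SLEIGH semantics for the AArch64 replicating loads LD2R/LD3R/LD4R: each instruction reads one element per destination register and broadcasts it across every lane of that register. Before this fix, the pcode broadcast the earlier elements into the wrong destination, always the last register of the group (Rtt, Rttt, Rtttt), so the first registers were never written. As a point of reference, a minimal C sketch of the intended LD2R.8B behaviour; the type and function names are illustrative only, not taken from the Ghidra source:

#include <stdint.h>
#include <string.h>

/* Illustrative model of AArch64 LD2R {Vt.8B, Vt2.8B}, [Xn]:
 * the byte at [Xn] is replicated across all 8 lanes of Vt,
 * the byte at [Xn+1] across all 8 lanes of Vt2. */
typedef struct { uint8_t lane[8]; } V64;

static void ld2r_8b(V64 *vt, V64 *vt2, const uint8_t *xn)
{
    memset(vt->lane,  xn[0], sizeof vt->lane);   /* first element  -> Vt  (was wrongly Vt2) */
    memset(vt2->lane, xn[1], sizeof vt2->lane);  /* second element -> Vt2 */
}

Each hunk below makes exactly this correction: the replication of each loaded element now targets the matching register operand (Rt, then Rtt, and so on) instead of always the final one.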
@@ -2799,14 +2799,14 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:1 = 0;
 tmpv = *:1 tmp_ldXn;
-Rtt_VPR64[0,8] = tmpv;
-Rtt_VPR64[8,8] = tmpv;
-Rtt_VPR64[16,8] = tmpv;
-Rtt_VPR64[24,8] = tmpv;
-Rtt_VPR64[32,8] = tmpv;
-Rtt_VPR64[40,8] = tmpv;
-Rtt_VPR64[48,8] = tmpv;
-Rtt_VPR64[56,8] = tmpv;
+Rt_VPR64[0,8] = tmpv;
+Rt_VPR64[8,8] = tmpv;
+Rt_VPR64[16,8] = tmpv;
+Rt_VPR64[24,8] = tmpv;
+Rt_VPR64[32,8] = tmpv;
+Rt_VPR64[40,8] = tmpv;
+Rt_VPR64[48,8] = tmpv;
+Rt_VPR64[56,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
 Rtt_VPR64[0,8] = tmpv;
@@ -2835,10 +2835,10 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:2 = 0;
 tmpv = *:2 tmp_ldXn;
-Rtt_VPR64[0,16] = tmpv;
-Rtt_VPR64[16,16] = tmpv;
-Rtt_VPR64[32,16] = tmpv;
-Rtt_VPR64[48,16] = tmpv;
+Rt_VPR64[0,16] = tmpv;
+Rt_VPR64[16,16] = tmpv;
+Rt_VPR64[32,16] = tmpv;
+Rt_VPR64[48,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
 Rtt_VPR64[0,16] = tmpv;
@@ -2863,8 +2863,8 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:4 = 0;
 tmpv = *:4 tmp_ldXn;
-Rtt_VPR64[0,32] = tmpv;
-Rtt_VPR64[32,32] = tmpv;
+Rt_VPR64[0,32] = tmpv;
+Rt_VPR64[32,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
 Rtt_VPR64[0,32] = tmpv;
@@ -2887,7 +2887,7 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:8 = 0;
 tmpv = *:8 tmp_ldXn;
-Rtt_VPR64[0,64] = tmpv;
+Rt_VPR64[0,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
 Rtt_VPR64[0,64] = tmpv;
@@ -2909,22 +2909,22 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:1 = 0;
 tmpv = *:1 tmp_ldXn;
-Rtt_VPR128[0,8] = tmpv;
-Rtt_VPR128[8,8] = tmpv;
-Rtt_VPR128[16,8] = tmpv;
-Rtt_VPR128[24,8] = tmpv;
-Rtt_VPR128[32,8] = tmpv;
-Rtt_VPR128[40,8] = tmpv;
-Rtt_VPR128[48,8] = tmpv;
-Rtt_VPR128[56,8] = tmpv;
-Rtt_VPR128[64,8] = tmpv;
-Rtt_VPR128[72,8] = tmpv;
-Rtt_VPR128[80,8] = tmpv;
-Rtt_VPR128[88,8] = tmpv;
-Rtt_VPR128[96,8] = tmpv;
-Rtt_VPR128[104,8] = tmpv;
-Rtt_VPR128[112,8] = tmpv;
-Rtt_VPR128[120,8] = tmpv;
+Rt_VPR128[0,8] = tmpv;
+Rt_VPR128[8,8] = tmpv;
+Rt_VPR128[16,8] = tmpv;
+Rt_VPR128[24,8] = tmpv;
+Rt_VPR128[32,8] = tmpv;
+Rt_VPR128[40,8] = tmpv;
+Rt_VPR128[48,8] = tmpv;
+Rt_VPR128[56,8] = tmpv;
+Rt_VPR128[64,8] = tmpv;
+Rt_VPR128[72,8] = tmpv;
+Rt_VPR128[80,8] = tmpv;
+Rt_VPR128[88,8] = tmpv;
+Rt_VPR128[96,8] = tmpv;
+Rt_VPR128[104,8] = tmpv;
+Rt_VPR128[112,8] = tmpv;
+Rt_VPR128[120,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
 Rtt_VPR128[0,8] = tmpv;
@@ -2961,14 +2961,14 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:2 = 0;
 tmpv = *:2 tmp_ldXn;
-Rtt_VPR128[0,16] = tmpv;
-Rtt_VPR128[16,16] = tmpv;
-Rtt_VPR128[32,16] = tmpv;
-Rtt_VPR128[48,16] = tmpv;
-Rtt_VPR128[64,16] = tmpv;
-Rtt_VPR128[80,16] = tmpv;
-Rtt_VPR128[96,16] = tmpv;
-Rtt_VPR128[112,16] = tmpv;
+Rt_VPR128[0,16] = tmpv;
+Rt_VPR128[16,16] = tmpv;
+Rt_VPR128[32,16] = tmpv;
+Rt_VPR128[48,16] = tmpv;
+Rt_VPR128[64,16] = tmpv;
+Rt_VPR128[80,16] = tmpv;
+Rt_VPR128[96,16] = tmpv;
+Rt_VPR128[112,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
 Rtt_VPR128[0,16] = tmpv;
@@ -2997,10 +2997,10 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:4 = 0;
 tmpv = *:4 tmp_ldXn;
-Rtt_VPR128[0,32] = tmpv;
-Rtt_VPR128[32,32] = tmpv;
-Rtt_VPR128[64,32] = tmpv;
-Rtt_VPR128[96,32] = tmpv;
+Rt_VPR128[0,32] = tmpv;
+Rt_VPR128[32,32] = tmpv;
+Rt_VPR128[64,32] = tmpv;
+Rt_VPR128[96,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
 Rtt_VPR128[0,32] = tmpv;
@@ -3025,8 +3025,8 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b110 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:8 = 0;
 tmpv = *:8 tmp_ldXn;
-Rtt_VPR128[0,64] = tmpv;
-Rtt_VPR128[64,64] = tmpv;
+Rt_VPR128[0,64] = tmpv;
+Rt_VPR128[64,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
 Rtt_VPR128[0,64] = tmpv;
@@ -3974,24 +3974,24 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:1 = 0;
 tmpv = *:1 tmp_ldXn;
-Rttt_VPR64[0,8] = tmpv;
-Rttt_VPR64[8,8] = tmpv;
-Rttt_VPR64[16,8] = tmpv;
-Rttt_VPR64[24,8] = tmpv;
-Rttt_VPR64[32,8] = tmpv;
-Rttt_VPR64[40,8] = tmpv;
-Rttt_VPR64[48,8] = tmpv;
-Rttt_VPR64[56,8] = tmpv;
+Rt_VPR64[0,8] = tmpv;
+Rt_VPR64[8,8] = tmpv;
+Rt_VPR64[16,8] = tmpv;
+Rt_VPR64[24,8] = tmpv;
+Rt_VPR64[32,8] = tmpv;
+Rt_VPR64[40,8] = tmpv;
+Rt_VPR64[48,8] = tmpv;
+Rt_VPR64[56,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
-Rttt_VPR64[0,8] = tmpv;
-Rttt_VPR64[8,8] = tmpv;
-Rttt_VPR64[16,8] = tmpv;
-Rttt_VPR64[24,8] = tmpv;
-Rttt_VPR64[32,8] = tmpv;
-Rttt_VPR64[40,8] = tmpv;
-Rttt_VPR64[48,8] = tmpv;
-Rttt_VPR64[56,8] = tmpv;
+Rtt_VPR64[0,8] = tmpv;
+Rtt_VPR64[8,8] = tmpv;
+Rtt_VPR64[16,8] = tmpv;
+Rtt_VPR64[24,8] = tmpv;
+Rtt_VPR64[32,8] = tmpv;
+Rtt_VPR64[40,8] = tmpv;
+Rtt_VPR64[48,8] = tmpv;
+Rtt_VPR64[56,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
 Rttt_VPR64[0,8] = tmpv;
@@ -4020,16 +4020,16 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:2 = 0;
 tmpv = *:2 tmp_ldXn;
-Rttt_VPR64[0,16] = tmpv;
-Rttt_VPR64[16,16] = tmpv;
-Rttt_VPR64[32,16] = tmpv;
-Rttt_VPR64[48,16] = tmpv;
+Rt_VPR64[0,16] = tmpv;
+Rt_VPR64[16,16] = tmpv;
+Rt_VPR64[32,16] = tmpv;
+Rt_VPR64[48,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
-Rttt_VPR64[0,16] = tmpv;
-Rttt_VPR64[16,16] = tmpv;
-Rttt_VPR64[32,16] = tmpv;
-Rttt_VPR64[48,16] = tmpv;
+Rtt_VPR64[0,16] = tmpv;
+Rtt_VPR64[16,16] = tmpv;
+Rtt_VPR64[32,16] = tmpv;
+Rtt_VPR64[48,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
 Rttt_VPR64[0,16] = tmpv;
@@ -4054,12 +4054,12 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:4 = 0;
 tmpv = *:4 tmp_ldXn;
-Rttt_VPR64[0,32] = tmpv;
-Rttt_VPR64[32,32] = tmpv;
+Rt_VPR64[0,32] = tmpv;
+Rt_VPR64[32,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
-Rttt_VPR64[0,32] = tmpv;
-Rttt_VPR64[32,32] = tmpv;
+Rtt_VPR64[0,32] = tmpv;
+Rtt_VPR64[32,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
 Rttt_VPR64[0,32] = tmpv;
@@ -4082,10 +4082,10 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:8 = 0;
 tmpv = *:8 tmp_ldXn;
-Rttt_VPR64[0,64] = tmpv;
+Rt_VPR64[0,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
-Rttt_VPR64[0,64] = tmpv;
+Rtt_VPR64[0,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
 Rttt_VPR64[0,64] = tmpv;
@@ -4107,40 +4107,40 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:1 = 0;
 tmpv = *:1 tmp_ldXn;
-Rttt_VPR128[0,8] = tmpv;
-Rttt_VPR128[8,8] = tmpv;
-Rttt_VPR128[16,8] = tmpv;
-Rttt_VPR128[24,8] = tmpv;
-Rttt_VPR128[32,8] = tmpv;
-Rttt_VPR128[40,8] = tmpv;
-Rttt_VPR128[48,8] = tmpv;
-Rttt_VPR128[56,8] = tmpv;
-Rttt_VPR128[64,8] = tmpv;
-Rttt_VPR128[72,8] = tmpv;
-Rttt_VPR128[80,8] = tmpv;
-Rttt_VPR128[88,8] = tmpv;
-Rttt_VPR128[96,8] = tmpv;
-Rttt_VPR128[104,8] = tmpv;
-Rttt_VPR128[112,8] = tmpv;
-Rttt_VPR128[120,8] = tmpv;
+Rt_VPR128[0,8] = tmpv;
+Rt_VPR128[8,8] = tmpv;
+Rt_VPR128[16,8] = tmpv;
+Rt_VPR128[24,8] = tmpv;
+Rt_VPR128[32,8] = tmpv;
+Rt_VPR128[40,8] = tmpv;
+Rt_VPR128[48,8] = tmpv;
+Rt_VPR128[56,8] = tmpv;
+Rt_VPR128[64,8] = tmpv;
+Rt_VPR128[72,8] = tmpv;
+Rt_VPR128[80,8] = tmpv;
+Rt_VPR128[88,8] = tmpv;
+Rt_VPR128[96,8] = tmpv;
+Rt_VPR128[104,8] = tmpv;
+Rt_VPR128[112,8] = tmpv;
+Rt_VPR128[120,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
-Rttt_VPR128[0,8] = tmpv;
-Rttt_VPR128[8,8] = tmpv;
-Rttt_VPR128[16,8] = tmpv;
-Rttt_VPR128[24,8] = tmpv;
-Rttt_VPR128[32,8] = tmpv;
-Rttt_VPR128[40,8] = tmpv;
-Rttt_VPR128[48,8] = tmpv;
-Rttt_VPR128[56,8] = tmpv;
-Rttt_VPR128[64,8] = tmpv;
-Rttt_VPR128[72,8] = tmpv;
-Rttt_VPR128[80,8] = tmpv;
-Rttt_VPR128[88,8] = tmpv;
-Rttt_VPR128[96,8] = tmpv;
-Rttt_VPR128[104,8] = tmpv;
-Rttt_VPR128[112,8] = tmpv;
-Rttt_VPR128[120,8] = tmpv;
+Rtt_VPR128[0,8] = tmpv;
+Rtt_VPR128[8,8] = tmpv;
+Rtt_VPR128[16,8] = tmpv;
+Rtt_VPR128[24,8] = tmpv;
+Rtt_VPR128[32,8] = tmpv;
+Rtt_VPR128[40,8] = tmpv;
+Rtt_VPR128[48,8] = tmpv;
+Rtt_VPR128[56,8] = tmpv;
+Rtt_VPR128[64,8] = tmpv;
+Rtt_VPR128[72,8] = tmpv;
+Rtt_VPR128[80,8] = tmpv;
+Rtt_VPR128[88,8] = tmpv;
+Rtt_VPR128[96,8] = tmpv;
+Rtt_VPR128[104,8] = tmpv;
+Rtt_VPR128[112,8] = tmpv;
+Rtt_VPR128[120,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
 Rttt_VPR128[0,8] = tmpv;
@@ -4177,24 +4177,24 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:2 = 0;
 tmpv = *:2 tmp_ldXn;
-Rttt_VPR128[0,16] = tmpv;
-Rttt_VPR128[16,16] = tmpv;
-Rttt_VPR128[32,16] = tmpv;
-Rttt_VPR128[48,16] = tmpv;
-Rttt_VPR128[64,16] = tmpv;
-Rttt_VPR128[80,16] = tmpv;
-Rttt_VPR128[96,16] = tmpv;
-Rttt_VPR128[112,16] = tmpv;
+Rt_VPR128[0,16] = tmpv;
+Rt_VPR128[16,16] = tmpv;
+Rt_VPR128[32,16] = tmpv;
+Rt_VPR128[48,16] = tmpv;
+Rt_VPR128[64,16] = tmpv;
+Rt_VPR128[80,16] = tmpv;
+Rt_VPR128[96,16] = tmpv;
+Rt_VPR128[112,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
-Rttt_VPR128[0,16] = tmpv;
-Rttt_VPR128[16,16] = tmpv;
-Rttt_VPR128[32,16] = tmpv;
-Rttt_VPR128[48,16] = tmpv;
-Rttt_VPR128[64,16] = tmpv;
-Rttt_VPR128[80,16] = tmpv;
-Rttt_VPR128[96,16] = tmpv;
-Rttt_VPR128[112,16] = tmpv;
+Rtt_VPR128[0,16] = tmpv;
+Rtt_VPR128[16,16] = tmpv;
+Rtt_VPR128[32,16] = tmpv;
+Rtt_VPR128[48,16] = tmpv;
+Rtt_VPR128[64,16] = tmpv;
+Rtt_VPR128[80,16] = tmpv;
+Rtt_VPR128[96,16] = tmpv;
+Rtt_VPR128[112,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
 Rttt_VPR128[0,16] = tmpv;
@@ -4223,16 +4223,16 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:4 = 0;
 tmpv = *:4 tmp_ldXn;
-Rttt_VPR128[0,32] = tmpv;
-Rttt_VPR128[32,32] = tmpv;
-Rttt_VPR128[64,32] = tmpv;
-Rttt_VPR128[96,32] = tmpv;
+Rt_VPR128[0,32] = tmpv;
+Rt_VPR128[32,32] = tmpv;
+Rt_VPR128[64,32] = tmpv;
+Rt_VPR128[96,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
-Rttt_VPR128[0,32] = tmpv;
-Rttt_VPR128[32,32] = tmpv;
-Rttt_VPR128[64,32] = tmpv;
-Rttt_VPR128[96,32] = tmpv;
+Rtt_VPR128[0,32] = tmpv;
+Rtt_VPR128[32,32] = tmpv;
+Rtt_VPR128[64,32] = tmpv;
+Rtt_VPR128[96,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
 Rttt_VPR128[0,32] = tmpv;
@@ -4257,12 +4257,12 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=0 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:8 = 0;
 tmpv = *:8 tmp_ldXn;
-Rttt_VPR128[0,64] = tmpv;
-Rttt_VPR128[64,64] = tmpv;
+Rt_VPR128[0,64] = tmpv;
+Rt_VPR128[64,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
-Rttt_VPR128[0,64] = tmpv;
-Rttt_VPR128[64,64] = tmpv;
+Rtt_VPR128[0,64] = tmpv;
+Rtt_VPR128[64,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
 Rttt_VPR128[0,64] = tmpv;
@@ -5358,34 +5358,34 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:1 = 0;
 tmpv = *:1 tmp_ldXn;
-Rtttt_VPR64[0,8] = tmpv;
-Rtttt_VPR64[8,8] = tmpv;
-Rtttt_VPR64[16,8] = tmpv;
-Rtttt_VPR64[24,8] = tmpv;
-Rtttt_VPR64[32,8] = tmpv;
-Rtttt_VPR64[40,8] = tmpv;
-Rtttt_VPR64[48,8] = tmpv;
-Rtttt_VPR64[56,8] = tmpv;
+Rt_VPR64[0,8] = tmpv;
+Rt_VPR64[8,8] = tmpv;
+Rt_VPR64[16,8] = tmpv;
+Rt_VPR64[24,8] = tmpv;
+Rt_VPR64[32,8] = tmpv;
+Rt_VPR64[40,8] = tmpv;
+Rt_VPR64[48,8] = tmpv;
+Rt_VPR64[56,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
-Rtttt_VPR64[0,8] = tmpv;
-Rtttt_VPR64[8,8] = tmpv;
-Rtttt_VPR64[16,8] = tmpv;
-Rtttt_VPR64[24,8] = tmpv;
-Rtttt_VPR64[32,8] = tmpv;
-Rtttt_VPR64[40,8] = tmpv;
-Rtttt_VPR64[48,8] = tmpv;
-Rtttt_VPR64[56,8] = tmpv;
+Rtt_VPR64[0,8] = tmpv;
+Rtt_VPR64[8,8] = tmpv;
+Rtt_VPR64[16,8] = tmpv;
+Rtt_VPR64[24,8] = tmpv;
+Rtt_VPR64[32,8] = tmpv;
+Rtt_VPR64[40,8] = tmpv;
+Rtt_VPR64[48,8] = tmpv;
+Rtt_VPR64[56,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
-Rtttt_VPR64[0,8] = tmpv;
-Rtttt_VPR64[8,8] = tmpv;
-Rtttt_VPR64[16,8] = tmpv;
-Rtttt_VPR64[24,8] = tmpv;
-Rtttt_VPR64[32,8] = tmpv;
-Rtttt_VPR64[40,8] = tmpv;
-Rtttt_VPR64[48,8] = tmpv;
-Rtttt_VPR64[56,8] = tmpv;
+Rttt_VPR64[0,8] = tmpv;
+Rttt_VPR64[8,8] = tmpv;
+Rttt_VPR64[16,8] = tmpv;
+Rttt_VPR64[24,8] = tmpv;
+Rttt_VPR64[32,8] = tmpv;
+Rttt_VPR64[40,8] = tmpv;
+Rttt_VPR64[48,8] = tmpv;
+Rttt_VPR64[56,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
 Rtttt_VPR64[0,8] = tmpv;
@@ -5414,22 +5414,22 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:2 = 0;
 tmpv = *:2 tmp_ldXn;
-Rtttt_VPR64[0,16] = tmpv;
-Rtttt_VPR64[16,16] = tmpv;
-Rtttt_VPR64[32,16] = tmpv;
-Rtttt_VPR64[48,16] = tmpv;
+Rt_VPR64[0,16] = tmpv;
+Rt_VPR64[16,16] = tmpv;
+Rt_VPR64[32,16] = tmpv;
+Rt_VPR64[48,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
-Rtttt_VPR64[0,16] = tmpv;
-Rtttt_VPR64[16,16] = tmpv;
-Rtttt_VPR64[32,16] = tmpv;
-Rtttt_VPR64[48,16] = tmpv;
+Rtt_VPR64[0,16] = tmpv;
+Rtt_VPR64[16,16] = tmpv;
+Rtt_VPR64[32,16] = tmpv;
+Rtt_VPR64[48,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
-Rtttt_VPR64[0,16] = tmpv;
-Rtttt_VPR64[16,16] = tmpv;
-Rtttt_VPR64[32,16] = tmpv;
-Rtttt_VPR64[48,16] = tmpv;
+Rttt_VPR64[0,16] = tmpv;
+Rttt_VPR64[16,16] = tmpv;
+Rttt_VPR64[32,16] = tmpv;
+Rttt_VPR64[48,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
 Rtttt_VPR64[0,16] = tmpv;
@@ -5454,16 +5454,16 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:4 = 0;
 tmpv = *:4 tmp_ldXn;
-Rtttt_VPR64[0,32] = tmpv;
-Rtttt_VPR64[32,32] = tmpv;
+Rt_VPR64[0,32] = tmpv;
+Rt_VPR64[32,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
-Rtttt_VPR64[0,32] = tmpv;
-Rtttt_VPR64[32,32] = tmpv;
+Rtt_VPR64[0,32] = tmpv;
+Rtt_VPR64[32,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
-Rtttt_VPR64[0,32] = tmpv;
-Rtttt_VPR64[32,32] = tmpv;
+Rttt_VPR64[0,32] = tmpv;
+Rttt_VPR64[32,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
 Rtttt_VPR64[0,32] = tmpv;
@@ -5486,13 +5486,13 @@ is b_31=0 & b_30=0 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:8 = 0;
 tmpv = *:8 tmp_ldXn;
-Rtttt_VPR64[0,64] = tmpv;
+Rt_VPR64[0,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
-Rtttt_VPR64[0,64] = tmpv;
+Rtt_VPR64[0,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
-Rtttt_VPR64[0,64] = tmpv;
+Rttt_VPR64[0,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
 Rtttt_VPR64[0,64] = tmpv;
@@ -5514,58 +5514,58 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:1 = 0;
 tmpv = *:1 tmp_ldXn;
-Rtttt_VPR128[0,8] = tmpv;
-Rtttt_VPR128[8,8] = tmpv;
-Rtttt_VPR128[16,8] = tmpv;
-Rtttt_VPR128[24,8] = tmpv;
-Rtttt_VPR128[32,8] = tmpv;
-Rtttt_VPR128[40,8] = tmpv;
-Rtttt_VPR128[48,8] = tmpv;
-Rtttt_VPR128[56,8] = tmpv;
-Rtttt_VPR128[64,8] = tmpv;
-Rtttt_VPR128[72,8] = tmpv;
-Rtttt_VPR128[80,8] = tmpv;
-Rtttt_VPR128[88,8] = tmpv;
-Rtttt_VPR128[96,8] = tmpv;
-Rtttt_VPR128[104,8] = tmpv;
-Rtttt_VPR128[112,8] = tmpv;
-Rtttt_VPR128[120,8] = tmpv;
+Rt_VPR128[0,8] = tmpv;
+Rt_VPR128[8,8] = tmpv;
+Rt_VPR128[16,8] = tmpv;
+Rt_VPR128[24,8] = tmpv;
+Rt_VPR128[32,8] = tmpv;
+Rt_VPR128[40,8] = tmpv;
+Rt_VPR128[48,8] = tmpv;
+Rt_VPR128[56,8] = tmpv;
+Rt_VPR128[64,8] = tmpv;
+Rt_VPR128[72,8] = tmpv;
+Rt_VPR128[80,8] = tmpv;
+Rt_VPR128[88,8] = tmpv;
+Rt_VPR128[96,8] = tmpv;
+Rt_VPR128[104,8] = tmpv;
+Rt_VPR128[112,8] = tmpv;
+Rt_VPR128[120,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
-Rtttt_VPR128[0,8] = tmpv;
-Rtttt_VPR128[8,8] = tmpv;
-Rtttt_VPR128[16,8] = tmpv;
-Rtttt_VPR128[24,8] = tmpv;
-Rtttt_VPR128[32,8] = tmpv;
-Rtttt_VPR128[40,8] = tmpv;
-Rtttt_VPR128[48,8] = tmpv;
-Rtttt_VPR128[56,8] = tmpv;
-Rtttt_VPR128[64,8] = tmpv;
-Rtttt_VPR128[72,8] = tmpv;
-Rtttt_VPR128[80,8] = tmpv;
-Rtttt_VPR128[88,8] = tmpv;
-Rtttt_VPR128[96,8] = tmpv;
-Rtttt_VPR128[104,8] = tmpv;
-Rtttt_VPR128[112,8] = tmpv;
-Rtttt_VPR128[120,8] = tmpv;
+Rtt_VPR128[0,8] = tmpv;
+Rtt_VPR128[8,8] = tmpv;
+Rtt_VPR128[16,8] = tmpv;
+Rtt_VPR128[24,8] = tmpv;
+Rtt_VPR128[32,8] = tmpv;
+Rtt_VPR128[40,8] = tmpv;
+Rtt_VPR128[48,8] = tmpv;
+Rtt_VPR128[56,8] = tmpv;
+Rtt_VPR128[64,8] = tmpv;
+Rtt_VPR128[72,8] = tmpv;
+Rtt_VPR128[80,8] = tmpv;
+Rtt_VPR128[88,8] = tmpv;
+Rtt_VPR128[96,8] = tmpv;
+Rtt_VPR128[104,8] = tmpv;
+Rtt_VPR128[112,8] = tmpv;
+Rtt_VPR128[120,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
-Rtttt_VPR128[0,8] = tmpv;
-Rtttt_VPR128[8,8] = tmpv;
-Rtttt_VPR128[16,8] = tmpv;
-Rtttt_VPR128[24,8] = tmpv;
-Rtttt_VPR128[32,8] = tmpv;
-Rtttt_VPR128[40,8] = tmpv;
-Rtttt_VPR128[48,8] = tmpv;
-Rtttt_VPR128[56,8] = tmpv;
-Rtttt_VPR128[64,8] = tmpv;
-Rtttt_VPR128[72,8] = tmpv;
-Rtttt_VPR128[80,8] = tmpv;
-Rtttt_VPR128[88,8] = tmpv;
-Rtttt_VPR128[96,8] = tmpv;
-Rtttt_VPR128[104,8] = tmpv;
-Rtttt_VPR128[112,8] = tmpv;
-Rtttt_VPR128[120,8] = tmpv;
+Rttt_VPR128[0,8] = tmpv;
+Rttt_VPR128[8,8] = tmpv;
+Rttt_VPR128[16,8] = tmpv;
+Rttt_VPR128[24,8] = tmpv;
+Rttt_VPR128[32,8] = tmpv;
+Rttt_VPR128[40,8] = tmpv;
+Rttt_VPR128[48,8] = tmpv;
+Rttt_VPR128[56,8] = tmpv;
+Rttt_VPR128[64,8] = tmpv;
+Rttt_VPR128[72,8] = tmpv;
+Rttt_VPR128[80,8] = tmpv;
+Rttt_VPR128[88,8] = tmpv;
+Rttt_VPR128[96,8] = tmpv;
+Rttt_VPR128[104,8] = tmpv;
+Rttt_VPR128[112,8] = tmpv;
+Rttt_VPR128[120,8] = tmpv;
 tmp_ldXn = tmp_ldXn + 1;
 tmpv = *:1 tmp_ldXn;
 Rtttt_VPR128[0,8] = tmpv;
@@ -5602,34 +5602,34 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:2 = 0;
 tmpv = *:2 tmp_ldXn;
-Rtttt_VPR128[0,16] = tmpv;
-Rtttt_VPR128[16,16] = tmpv;
-Rtttt_VPR128[32,16] = tmpv;
-Rtttt_VPR128[48,16] = tmpv;
-Rtttt_VPR128[64,16] = tmpv;
-Rtttt_VPR128[80,16] = tmpv;
-Rtttt_VPR128[96,16] = tmpv;
-Rtttt_VPR128[112,16] = tmpv;
+Rt_VPR128[0,16] = tmpv;
+Rt_VPR128[16,16] = tmpv;
+Rt_VPR128[32,16] = tmpv;
+Rt_VPR128[48,16] = tmpv;
+Rt_VPR128[64,16] = tmpv;
+Rt_VPR128[80,16] = tmpv;
+Rt_VPR128[96,16] = tmpv;
+Rt_VPR128[112,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
-Rtttt_VPR128[0,16] = tmpv;
-Rtttt_VPR128[16,16] = tmpv;
-Rtttt_VPR128[32,16] = tmpv;
-Rtttt_VPR128[48,16] = tmpv;
-Rtttt_VPR128[64,16] = tmpv;
-Rtttt_VPR128[80,16] = tmpv;
-Rtttt_VPR128[96,16] = tmpv;
-Rtttt_VPR128[112,16] = tmpv;
+Rtt_VPR128[0,16] = tmpv;
+Rtt_VPR128[16,16] = tmpv;
+Rtt_VPR128[32,16] = tmpv;
+Rtt_VPR128[48,16] = tmpv;
+Rtt_VPR128[64,16] = tmpv;
+Rtt_VPR128[80,16] = tmpv;
+Rtt_VPR128[96,16] = tmpv;
+Rtt_VPR128[112,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
-Rtttt_VPR128[0,16] = tmpv;
-Rtttt_VPR128[16,16] = tmpv;
-Rtttt_VPR128[32,16] = tmpv;
-Rtttt_VPR128[48,16] = tmpv;
-Rtttt_VPR128[64,16] = tmpv;
-Rtttt_VPR128[80,16] = tmpv;
-Rtttt_VPR128[96,16] = tmpv;
-Rtttt_VPR128[112,16] = tmpv;
+Rttt_VPR128[0,16] = tmpv;
+Rttt_VPR128[16,16] = tmpv;
+Rttt_VPR128[32,16] = tmpv;
+Rttt_VPR128[48,16] = tmpv;
+Rttt_VPR128[64,16] = tmpv;
+Rttt_VPR128[80,16] = tmpv;
+Rttt_VPR128[96,16] = tmpv;
+Rttt_VPR128[112,16] = tmpv;
 tmp_ldXn = tmp_ldXn + 2;
 tmpv = *:2 tmp_ldXn;
 Rtttt_VPR128[0,16] = tmpv;
@@ -5658,22 +5658,22 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:4 = 0;
 tmpv = *:4 tmp_ldXn;
-Rtttt_VPR128[0,32] = tmpv;
-Rtttt_VPR128[32,32] = tmpv;
-Rtttt_VPR128[64,32] = tmpv;
-Rtttt_VPR128[96,32] = tmpv;
+Rt_VPR128[0,32] = tmpv;
+Rt_VPR128[32,32] = tmpv;
+Rt_VPR128[64,32] = tmpv;
+Rt_VPR128[96,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
-Rtttt_VPR128[0,32] = tmpv;
-Rtttt_VPR128[32,32] = tmpv;
-Rtttt_VPR128[64,32] = tmpv;
-Rtttt_VPR128[96,32] = tmpv;
+Rtt_VPR128[0,32] = tmpv;
+Rtt_VPR128[32,32] = tmpv;
+Rtt_VPR128[64,32] = tmpv;
+Rtt_VPR128[96,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
-Rtttt_VPR128[0,32] = tmpv;
-Rtttt_VPR128[32,32] = tmpv;
-Rtttt_VPR128[64,32] = tmpv;
-Rtttt_VPR128[96,32] = tmpv;
+Rttt_VPR128[0,32] = tmpv;
+Rttt_VPR128[32,32] = tmpv;
+Rttt_VPR128[64,32] = tmpv;
+Rttt_VPR128[96,32] = tmpv;
 tmp_ldXn = tmp_ldXn + 4;
 tmpv = *:4 tmp_ldXn;
 Rtttt_VPR128[0,32] = tmpv;
@@ -5698,16 +5698,16 @@ is b_31=0 & b_30=1 & b_2429=0b001101 & b_22=1 & b_21=1 & b_1315=0b111 & b_12=0 &
 tmp_ldXn = Rn_GPR64xsp;
 local tmpv:8 = 0;
 tmpv = *:8 tmp_ldXn;
-Rtttt_VPR128[0,64] = tmpv;
-Rtttt_VPR128[64,64] = tmpv;
+Rt_VPR128[0,64] = tmpv;
+Rt_VPR128[64,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
-Rtttt_VPR128[0,64] = tmpv;
-Rtttt_VPR128[64,64] = tmpv;
+Rtt_VPR128[0,64] = tmpv;
+Rtt_VPR128[64,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
-Rtttt_VPR128[0,64] = tmpv;
-Rtttt_VPR128[64,64] = tmpv;
+Rttt_VPR128[0,64] = tmpv;
+Rttt_VPR128[64,64] = tmpv;
 tmp_ldXn = tmp_ldXn + 8;
 tmpv = *:8 tmp_ldXn;
 Rtttt_VPR128[0,64] = tmpv;
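The LD3R/LD4R hunks above apply the same correction with one more destination per element: the k-th element read from memory is broadcast into the k-th register of the group before the pointer advances by the element size. A companion sketch for the LD4R.8B case, reusing the illustrative V64 type and headers from the sketch near the top (again not the Ghidra source itself):

/* Illustrative model of LD4R {Vt.8B, Vt2.8B, Vt3.8B, Vt4.8B}, [Xn]:
 * element k is broadcast into register k of the group. */
static void ld4r_8b(V64 *regs[4], const uint8_t *xn)
{
    for (int k = 0; k < 4; k++)
        memset(regs[k]->lane, xn[k], sizeof regs[k]->lane);  /* element k -> Vt(k+1) */
}

Before the fix, the pcode equivalent of every one of these broadcasts targeted regs[3] (Rtttt); only the final broadcast in each semantics block was correct, which is why each hunk rewrites all but the last group of lane assignments.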