Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2025-10-05 10:49:34 +02:00)

Merge remote-tracking branch 'origin/GP-1518_DecompilerUnions'
Commit 31b30adf2d
65 changed files with 5119 additions and 1068 deletions

@@ -43,46 +43,32 @@ public class DynamicHash {

	public final static int transtable[] = { 0, PcodeOp.COPY, PcodeOp.LOAD, PcodeOp.STORE,
		PcodeOp.BRANCH, PcodeOp.CBRANCH, PcodeOp.BRANCHIND,

-		PcodeOp.CALL,
-		PcodeOp.CALLIND,
-		PcodeOp.CALLOTHER,
-		PcodeOp.RETURN,
+		PcodeOp.CALL, PcodeOp.CALLIND, PcodeOp.CALLOTHER, PcodeOp.RETURN,

-		PcodeOp.INT_EQUAL,
-		PcodeOp.INT_EQUAL, // NOT_EQUAL hashes same as EQUAL
-		PcodeOp.INT_SLESS,
-		PcodeOp.INT_SLESS, // SLESSEQUAL hashes same as SLESS
-		PcodeOp.INT_LESS,
-		PcodeOp.INT_LESS, // LESSEQUAL hashes same as LESS
+		PcodeOp.INT_EQUAL, PcodeOp.INT_EQUAL, // NOT_EQUAL hashes same as EQUAL
+		PcodeOp.INT_SLESS, PcodeOp.INT_SLESS, // SLESSEQUAL hashes same as SLESS
+		PcodeOp.INT_LESS, PcodeOp.INT_LESS, // LESSEQUAL hashes same as LESS

-		PcodeOp.INT_ZEXT,
-		PcodeOp.INT_SEXT,
-		PcodeOp.INT_ADD,
-		PcodeOp.INT_ADD, // SUB hashes same as ADD
+		PcodeOp.INT_ZEXT, PcodeOp.INT_SEXT, PcodeOp.INT_ADD, PcodeOp.INT_ADD, // SUB hashes same as ADD
		PcodeOp.INT_CARRY, PcodeOp.INT_SCARRY, PcodeOp.INT_SBORROW, PcodeOp.INT_2COMP,
		PcodeOp.INT_NEGATE,

-		PcodeOp.INT_XOR, PcodeOp.INT_AND,
-		PcodeOp.INT_OR,
-		PcodeOp.INT_MULT, // LEFT hashes same as MULT
+		PcodeOp.INT_XOR, PcodeOp.INT_AND, PcodeOp.INT_OR, PcodeOp.INT_MULT, // LEFT hashes same as MULT
		PcodeOp.INT_RIGHT, PcodeOp.INT_SRIGHT, PcodeOp.INT_MULT, PcodeOp.INT_DIV, PcodeOp.INT_SDIV,
		PcodeOp.INT_REM, PcodeOp.INT_SREM,

		PcodeOp.BOOL_NEGATE, PcodeOp.BOOL_XOR, PcodeOp.BOOL_AND, PcodeOp.BOOL_OR,

-		PcodeOp.FLOAT_EQUAL,
-		PcodeOp.FLOAT_EQUAL, // NOTEQUAL hashes same as EQUAL
-		PcodeOp.FLOAT_LESS,
-		PcodeOp.FLOAT_LESS, // LESSEQUAL hashes same as EQUAL
+		PcodeOp.FLOAT_EQUAL, PcodeOp.FLOAT_EQUAL, // NOTEQUAL hashes same as EQUAL
+		PcodeOp.FLOAT_LESS, PcodeOp.FLOAT_LESS, // LESSEQUAL hashes same as EQUAL
		0, // Unused slot -- skip
		PcodeOp.FLOAT_NAN,

-		PcodeOp.FLOAT_ADD, PcodeOp.FLOAT_DIV, PcodeOp.FLOAT_MULT,
-		PcodeOp.FLOAT_ADD, // SUB hashes same as ADD
+		PcodeOp.FLOAT_ADD, PcodeOp.FLOAT_DIV, PcodeOp.FLOAT_MULT, PcodeOp.FLOAT_ADD, // SUB hashes same as ADD
		PcodeOp.FLOAT_NEG, PcodeOp.FLOAT_ABS, PcodeOp.FLOAT_SQRT,

-		PcodeOp.FLOAT_INT2FLOAT, PcodeOp.FLOAT_FLOAT2FLOAT, PcodeOp.FLOAT_TRUNC,
-		PcodeOp.FLOAT_CEIL, PcodeOp.FLOAT_FLOOR, PcodeOp.FLOAT_ROUND,
+		PcodeOp.FLOAT_INT2FLOAT, PcodeOp.FLOAT_FLOAT2FLOAT, PcodeOp.FLOAT_TRUNC, PcodeOp.FLOAT_CEIL,
+		PcodeOp.FLOAT_FLOOR, PcodeOp.FLOAT_ROUND,

		PcodeOp.MULTIEQUAL, PcodeOp.INDIRECT, PcodeOp.PIECE, PcodeOp.SUBPIECE,

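A note on intent that may help when reading the consolidated table: entries are grouped so that opcodes which must be indistinguishable to the hash share one value. A minimal sketch of that lookup, assuming (as the layout and comments suggest) that the raw opcode value indexes the table; this is illustrative only and not part of the commit:

	// Collapse an opcode to the code actually fed to the hash; INT_NOTEQUAL and
	// INT_EQUAL, for example, yield the same value.
	static int canonicalOpcode(PcodeOp op) {
		return DynamicHash.transtable[op.getOpcode()];
	}
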
@@ -162,10 +148,10 @@ public class DynamicHash {
	private long hash;

	private DynamicHash() {
-		markop = new ArrayList<PcodeOp>();
-		markvn = new ArrayList<Varnode>();
-		vnedge = new ArrayList<Varnode>();
-		opedge = new ArrayList<ToOpEdge>();
+		markop = new ArrayList<>();
+		markvn = new ArrayList<>();
+		vnedge = new ArrayList<>();
+		opedge = new ArrayList<>();
	}

	/**

@@ -181,7 +167,7 @@ public class DynamicHash {

	/**
	 * Construct a unique hash for the given Varnode, which must be in
-	 * a syntax tree. The hash method is cycled into a uniquely identifying one is found.
+	 * a syntax tree. The hash method is cycled until a uniquely identifying one is found.
	 * @param root is the given Varnode
	 * @param fd is the PcodeSyntaxTree containing the Varnode
	 */

@@ -190,6 +176,18 @@ public class DynamicHash {
		uniqueHash(root, fd);
	}

+	/**
+	 * Construct a unique hash that allows recovery of a specific PcodeOp and slot from the
+	 * syntax tree. The hash method is cycled until a uniquely identifying one is found.
+	 * @param op is the specific PcodeOp to hash
+	 * @param slot is the specific slot (-1 is the output, >=0 is an input)
+	 * @param fd is the PcodeSyntaxTree containing the PcodeOp
+	 */
+	public DynamicHash(PcodeOp op, int slot, PcodeSyntaxTree fd) {
+		this();
+		uniqueHash(op, slot, fd);
+	}
+
	/**
	 * Construct a level 0 hash on the input Varnode to the given PcodeOp
	 *

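For orientation, a minimal usage sketch of the new constructor (illustrative, not part of this commit; op, slot and tree are assumed to come from an existing decompilation, tree being the PcodeSyntaxTree that owns op):

	DynamicHash dynamicHash = new DynamicHash(op, slot, tree);
	long h = dynamicHash.getHash();   // 0 means no usable hash could be formed for this op/slot
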
@@ -223,11 +221,47 @@ public class DynamicHash {
		opedge.clear();
	}

+	/**
+	 * Encode a particular PcodeOp and slot
+	 * @param op is the PcodeOp to preserve
+	 * @param slot is the slot to preserve (-1 for output, >=0 for input)
+	 * @param method is the method to use for encoding (4, 5, or 6)
+	 */
+	private void calcHash(PcodeOp op, int slot, int method) {
+		vnproc = 0;
+		opproc = 0;
+		opedgeproc = 0;
+		markset = new HashSet<>();
+		Varnode root = (slot < 0) ? op.getOutput() : op.getInput(slot);
+		opedge.add(new ToOpEdge(op, slot));
+		switch (method) {
+			case 4:
+				break;
+			case 5:
+				gatherUnmarkedOp();
+				for (; opproc < markop.size(); ++opproc) {
+					buildOpUp(markop.get(opproc));
+				}
+				gatherUnmarkedVn();
+				break;
+			case 6:
+				gatherUnmarkedOp();
+				for (; opproc < markop.size(); ++opproc) {
+					buildOpDown(markop.get(opproc));
+				}
+				gatherUnmarkedVn();
+				break;
+			default:
+				break;
+		}
+		pieceTogetherHash(root, method);
+	}
+
	private void calcHash(Varnode root, int method) {
		vnproc = 0;
		opproc = 0;
		opedgeproc = 0;
-		markset = new HashSet<Object>();
+		markset = new HashSet<>();

		vnedge.add(root);
		gatherUnmarkedVn();

@@ -278,6 +312,10 @@ public class DynamicHash {
				break;
		}

+		pieceTogetherHash(root, method);
+	}
+
+	private void pieceTogetherHash(Varnode root, int method) {
		if (opedge.size() == 0) {
			hash = 0;
			addrresult = null;

@@ -296,8 +334,8 @@ public class DynamicHash {
			}
		}

-		for (int i = 0; i < opedge.size(); ++i) {
-			reg = opedge.get(i).hash(reg);
+		for (ToOpEdge element : opedge) {
+			reg = element.hash(reg);
		}

		// Build the final 64-bit hash

@@ -335,10 +373,74 @@ public class DynamicHash {
		addrresult = op.getSeqnum().getTarget();
	}

+	private void uniqueHash(PcodeOp op, int slot, PcodeSyntaxTree fd) {
+		ArrayList<PcodeOp> oplist = new ArrayList<>();
+		ArrayList<PcodeOp> oplist2 = new ArrayList<>();
+		ArrayList<PcodeOp> champion = new ArrayList<>();
+		int method;
+		long tmphash = 0;
+		Address tmpaddr = null;
+		int maxduplicates = 8;
+
+		gatherOpsAtAddress(oplist, fd, op.getSeqnum().getTarget());
+		for (method = 4; method < 7; ++method) {
+			clear();
+			calcHash(op, slot, method);
+			if (hash == 0) {
+				return; // Can't get a good hash
+			}
+			tmphash = hash;
+			tmpaddr = addrresult;
+			oplist2.clear();
+			for (PcodeOp tmpop : oplist) {
+				if (slot >= tmpop.getNumInputs()) {
+					continue;
+				}
+				clear();
+				calcHash(tmpop, slot, method);
+				if (hash == tmphash) { // Hash collision
+					oplist2.add(tmpop);
+					if (oplist2.size() > maxduplicates) {
+						break;
+					}
+				}
+			}
+			if (oplist2.size() <= maxduplicates) {
+				if ((champion.size() == 0) || (oplist2.size() < champion.size())) {
+					champion = oplist2;
+					oplist2 = new ArrayList<>();
+					if (champion.size() == 1) {
+						break; // Current hash is unique
+					}
+				}
+			}
+		}
+		if (champion.size() == 0) {
+			hash = 0;
+			addrresult = Address.NO_ADDRESS; // Couldn't find a unique hash
+			return;
+		}
+		int total = champion.size() - 1; // total is in range [0,maxduplicates-1]
+		int pos;
+		for (pos = 0; pos <= total; ++pos) {
+			if (champion.get(pos) == op) {
+				break;
+			}
+		}
+		if (pos > total) {
+			hash = 0;
+			addrresult = Address.NO_ADDRESS;
+			return;
+		}
+		hash = tmphash | ((long) pos << 49); // Store three bits for position with list of duplicate hashes
+		hash |= ((long) total << 52); // Store three bits for total number of duplicate hashes
+		addrresult = tmpaddr;
+	}
+
	private void uniqueHash(Varnode root, PcodeSyntaxTree fd) {
-		ArrayList<Varnode> vnlist = new ArrayList<Varnode>();
-		ArrayList<Varnode> vnlist2 = new ArrayList<Varnode>();
-		ArrayList<Varnode> champion = new ArrayList<Varnode>();
+		ArrayList<Varnode> vnlist = new ArrayList<>();
+		ArrayList<Varnode> vnlist2 = new ArrayList<>();
+		ArrayList<Varnode> champion = new ArrayList<>();
		int method;
		long tmphash = 0;
		Address tmpaddr = null;

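The duplicate bookkeeping packed into the upper bits can be pictured as below. This is an inferred sketch of the layout described by the two commented lines above, not the shipped accessor code; getPositionFromHash and getTotalFromHash are the real entry points for reading the fields back:

	// pos (index among same-hash duplicates) occupies bits 49..51,
	// total (duplicate count minus one) occupies bits 52..54 of the 64-bit hash
	static long packDuplicateInfo(long baseHash, int pos, int total) {
		return baseHash | ((long) pos << 49) | ((long) total << 52);
	}
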
@@ -369,7 +471,7 @@ public class DynamicHash {
			if (vnlist2.size() <= maxduplicates) {
				if ((champion.size() == 0) || (vnlist2.size() < champion.size())) {
					champion = vnlist2;
-					vnlist2 = new ArrayList<Varnode>();
+					vnlist2 = new ArrayList<>();
					if (champion.size() == 1) {
						break; // Current hash is unique
					}

@@ -420,7 +522,7 @@ public class DynamicHash {
			return; // no descendants
		}

-		ArrayList<ToOpEdge> newedge = new ArrayList<ToOpEdge>();
+		ArrayList<ToOpEdge> newedge = new ArrayList<>();

		while (iter.hasNext()) {
			PcodeOp op = iter.next();

@@ -491,8 +593,8 @@ public class DynamicHash {
		int total = getTotalFromHash(h);
		int pos = getPositionFromHash(h);
		h = clearTotalPosition(h);
-		ArrayList<Varnode> vnlist = new ArrayList<Varnode>();
-		ArrayList<Varnode> vnlist2 = new ArrayList<Varnode>();
+		ArrayList<Varnode> vnlist = new ArrayList<>();
+		ArrayList<Varnode> vnlist2 = new ArrayList<>();
		gatherFirstLevelVars(vnlist, fd, addr, h);
		for (int i = 0; i < vnlist.size(); ++i) {
			Varnode tmpvn = vnlist.get(i);

@@ -508,6 +610,39 @@ public class DynamicHash {
		return vnlist2.get(pos);
	}

+	public static PcodeOp findOp(PcodeSyntaxTree fd, Address addr, long h) {
+		DynamicHash dhash = new DynamicHash();
+		int method = getMethodFromHash(h);
+		int slot = getSlotFromHash(h);
+		int total = getTotalFromHash(h);
+		int pos = getPositionFromHash(h);
+		h = clearTotalPosition(h);
+		ArrayList<PcodeOp> oplist = new ArrayList<>();
+		ArrayList<PcodeOp> oplist2 = new ArrayList<>();
+		gatherOpsAtAddress(oplist, fd, addr);
+		for (PcodeOp tmpop : oplist) {
+			if (slot >= tmpop.getNumInputs())
+				continue;
+			dhash.clear();
+			dhash.calcHash(tmpop, slot, method);
+			if (dhash.getHash() == h) {
+				oplist2.add(tmpop);
+			}
+		}
+		if (total != oplist2.size()) {
+			return null;
+		}
+		return oplist2.get(pos);
+	}
+
+	public static void gatherOpsAtAddress(ArrayList<PcodeOp> oplist, PcodeSyntaxTree fd,
+			Address addr) {
+		Iterator<PcodeOpAST> iter = fd.getPcodeOps(addr);
+		while (iter.hasNext()) {
+			oplist.add(iter.next());
+		}
+	}
+
	public static void gatherFirstLevelVars(ArrayList<Varnode> varlist, PcodeSyntaxTree fd,
			Address addr, long h) {
		int opc = getOpCodeFromHash(h);

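A recovery sketch (illustrative, not part of this commit): given only the address and hash saved earlier, findOp re-derives the same PcodeOp. Here tree is an assumed PcodeSyntaxTree, and savedAddr/savedHash are values produced by the op/slot constructor above:

	PcodeOp recovered = DynamicHash.findOp(tree, savedAddr, savedHash);
	if (recovered == null) {
		// the hash no longer resolves to exactly the recorded set of ops at this address
	}
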
@@ -615,7 +750,7 @@ public class DynamicHash {
	 * @param value of the constant
	 * @return array of hash values (may be zero length)
	 */
-	public static long[] calcConstantHash(Instruction instr,long value) {
+	public static long[] calcConstantHash(Instruction instr, long value) {
		long[] tmp = new long[2];
		int count = 0;
		for (PcodeOp op : instr.getPcode(true)) {

@@ -625,12 +760,12 @@ public class DynamicHash {
					matchWithPossibleExtension(inputs[i].getOffset(), inputs[i].getSize(), value)) {
					if (count >= tmp.length) {
						long[] newtmp = new long[count + 10];
-						for(int j=0;j<tmp.length;++j) {
+						for (int j = 0; j < tmp.length; ++j) {
							newtmp[j] = tmp[j];
						}
						tmp = newtmp;
					}
-					DynamicHash dynamicHash = new DynamicHash(op,i);
+					DynamicHash dynamicHash = new DynamicHash(op, i);
					tmp[count] = dynamicHash.getHash();
					if (tmp[count] != 0) {
						count += 1;

@@ -639,7 +774,7 @@ public class DynamicHash {
			}
		}
		long[] res = new long[count];
-		for(int i=0;i<count;++i) {
+		for (int i = 0; i < count; ++i) {
			res[i] = tmp[i];
		}
		return res;

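A usage sketch for calcConstantHash (illustrative, not part of this commit; instruction is an assumed ghidra.program.model.listing.Instruction):

	long[] hashes = DynamicHash.calcConstantHash(instruction, 0x1234L);
	for (long h : hashes) {
		// each entry identifies one constant Varnode matching the value at this instruction
	}
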
@@ -738,4 +738,45 @@ public class HighFunctionDBUtil {
		}
		return storageAddress;
	}
+
+	/**
+	 * Write a union facet to the database (UnionFacetSymbol). Parameters provide the
+	 * pieces for building the dynamic LocalVariable. This method clears out any preexisting
+	 * union facet with the same dynamic hash and firstUseOffset.
+	 * @param function is the function affected by the union facet
+	 * @param dt is the parent data-type, either the union or a pointer to it
+	 * @param fieldNum is the ordinal of the desired union field
+	 * @param addr is the first use address of the facet
+	 * @param hash is the dynamic hash
+	 * @param source is the SourceType for the LocalVariable
+	 * @throws InvalidInputException if the LocalVariable cannot be created
+	 * @throws DuplicateNameException if the (auto-generated) name is used elsewhere
+	 */
+	public static void writeUnionFacet(Function function, DataType dt, int fieldNum, Address addr,
+			long hash, SourceType source) throws InvalidInputException, DuplicateNameException {
+		int firstUseOffset = (int) addr.subtract(function.getEntryPoint());
+		Variable[] localVariables =
+			function.getLocalVariables(VariableFilter.UNIQUE_VARIABLE_FILTER);
+		Variable preexistingVar = null;
+		for (Variable var : localVariables) {
+			if (var.getFirstUseOffset() == firstUseOffset &&
+				var.getFirstStorageVarnode().getOffset() == hash) {
+				preexistingVar = var;
+				break;
+			}
+		}
+		String symbolName = UnionFacetSymbol.buildSymbolName(fieldNum, addr);
+		if (preexistingVar != null) {
+			if (preexistingVar.getName().equals(symbolName)) {
+				return; // No change to make
+			}
+			preexistingVar.setName(symbolName, source); // Change the name
+			return;
+		}
+		Program program = function.getProgram();
+		VariableStorage storage =
+			new VariableStorage(program, AddressSpace.HASH_SPACE.getAddress(hash), dt.getLength());
+		Variable var = new LocalVariableImpl(symbolName, firstUseOffset, dt, storage, program);
+		function.addLocalVariable(var, SourceType.USER_DEFINED);
+	}
}

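A caller-side sketch (illustrative, not part of this commit; function, unionDt, addr and hash are assumed inputs, and in practice the call would run inside a program transaction):

	try {
		HighFunctionDBUtil.writeUnionFacet(function, unionDt, 1, addr, hash, SourceType.USER_DEFINED);
	}
	catch (InvalidInputException | DuplicateNameException e) {
		// the facet could not be stored as a dynamic local variable
	}
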
@@ -47,7 +47,7 @@ public class HighSymbol {
	protected SymbolEntry[] entryList; // List of mappings for this symbol

	private HighVariable highVariable;
-	private PcodeDataTypeManager dtmanage; // Datatype manager for XML generation
+	protected PcodeDataTypeManager dtmanage; // Datatype manager for XML generation

	/**
	 * Constructor for use with restoreXML

@@ -453,7 +453,14 @@ public class LocalSymbolMap {
		if (id == 0) {
			id = getNextId();
		}
-		HighSymbol sym = new HighSymbol(id, nm, dt, func);
+		HighSymbol sym;
+		if (DynamicHash.getMethodFromHash(hash) > 3) {
+			int fieldNum = UnionFacetSymbol.extractFieldNumber(nm);
+			sym = new UnionFacetSymbol(id, nm, dt, fieldNum, func);
+		}
+		else {
+			sym = new HighSymbol(id, nm, dt, func);
+		}
		DynamicEntry entry = new DynamicEntry(sym, pcaddr, hash);
		sym.addMapEntry(entry);
		insertSymbol(sym);

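Why the method test is enough to recognize a union facet (an inference from the DynamicHash changes above, not stated explicitly in this diff): op/slot hashes are only ever built with methods 4 through 6, while plain Varnode hashes use the lower method numbers, so:

	boolean isUnionFacet = DynamicHash.getMethodFromHash(hash) > 3;   // true only for op/slot (facet) hashes
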
@@ -577,6 +577,9 @@ public class PcodeDataTypeManager {
				resBuf.append("</field>\n");
			}
		}
+		else if (type instanceof Union) {
+			buildUnion(resBuf, (Union) type);
+		}
		else if (type instanceof Enum) {
			appendNameIdAttributes(resBuf, type);
			Enum enumDt = (Enum) type;

@@ -746,18 +749,51 @@ public class PcodeDataTypeManager {
	 *
	 * @return XML string document
	 */
-	public StringBuilder buildStructTypeZeroSizeOveride(DataType type) {
+	public StringBuilder buildCompositeZeroSizePlaceholder(DataType type) {
		StringBuilder resBuf = new StringBuilder();
-		if (!(type instanceof Structure)) {
+		String metaString;
+		if (type instanceof Structure) {
+			metaString = "struct";
+		}
+		else if (type instanceof Union) {
+			metaString = "union";
+		}
+		else {
+			return resBuf; //empty. Could throw AssertException.
		}
		resBuf.append("<type");
		SpecXmlUtils.xmlEscapeAttribute(resBuf, "name", type.getDisplayName());
-		resBuf.append(" id=\"0x" + Long.toHexString(progDataTypes.getID(type)) + "\"");
-		resBuf.append(" metatype=\"struct\" size=\"0\"></type>");
+		resBuf.append(" id=\"0x" + Long.toHexString(progDataTypes.getID(type)) + "\" metatype=\"");
+		resBuf.append(metaString);
+		resBuf.append("\" size=\"0\"></type>");
		return resBuf;
	}

+	public void buildUnion(StringBuilder buffer, Union unionType) {
+		appendNameIdAttributes(buffer, unionType);
+		SpecXmlUtils.encodeStringAttribute(buffer, "metatype", "union");
+		SpecXmlUtils.encodeSignedIntegerAttribute(buffer, "size", unionType.getLength());
+		buffer.append(">\n");
+		DataTypeComponent[] comps = unionType.getDefinedComponents();
+		for (DataTypeComponent comp : comps) {
+			if (comp.getLength() == 0) {
+				continue;
+			}
+			buffer.append("<field");
+			String field_name = comp.getFieldName();
+			if (field_name == null) {
+				field_name = comp.getDefaultFieldName();
+			}
+			SpecXmlUtils.xmlEscapeAttribute(buffer, "name", field_name);
+			SpecXmlUtils.encodeSignedIntegerAttribute(buffer, "offset", comp.getOffset());
+			SpecXmlUtils.encodeSignedIntegerAttribute(buffer, "id", comp.getOrdinal());
+			buffer.append('>');
+			DataType fieldtype = comp.getDataType();
+			buildTypeRef(buffer, fieldtype, comp.getLength());
+			buffer.append("</field>\n");
+		}
+	}
+
	private void generateCoreTypes() {
		voidDt = new VoidDataType(progDataTypes);
		ArrayList<TypeMap> typeList = new ArrayList<>();

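For concreteness, the zero-size placeholder emitted by the renamed method now carries metatype="union" for unions; with a hypothetical name and id the appended text comes out as:

	<type name="MyUnion" id="0x5f3a2b1c" metatype="union" size="0"></type>
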
@@ -0,0 +1,80 @@
+/* ###
+ * IP: GHIDRA
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ghidra.program.model.pcode;
+
+import ghidra.program.model.address.Address;
+import ghidra.program.model.data.DataType;
+import ghidra.util.xml.SpecXmlUtils;
+
+/**
+ * A specialized HighSymbol that directs the decompiler to use a specific field of a union,
+ * when interpreting a particular PcodeOp that accesses a Varnode whose data-type involves the
+ * union. The symbol is stored as a dynamic variable annotation. The data-type must either be the
+ * union itself or a pointer to the union. The firstUseOffset and dynamic hash
+ * identify the particular PcodeOp and Varnode affected. The field number is the ordinal
+ * of the desired field (DataTypeComponent) within the union. It is currently stored by
+ * encoding it in the symbol name.
+ */
+public class UnionFacetSymbol extends HighSymbol {
+
+	public static String BASENAME = "unionfacet";
+	private int fieldNumber; // Ordinal of field within union being selected
+
+	public UnionFacetSymbol(long uniqueId, String nm, DataType dt, int fldNum, HighFunction func) {
+		super(uniqueId, nm, dt, func);
+		category = 2;
+		fieldNumber = fldNum;
+	}
+
+	@Override
+	public void saveXML(StringBuilder buf) {
+		buf.append("<facetsymbol");
+		saveXMLHeader(buf);
+		SpecXmlUtils.encodeSignedIntegerAttribute(buf, "field", fieldNumber);
+		buf.append(">\n");
+		dtmanage.buildTypeRef(buf, type, getSize());
+		buf.append("</facetsymbol>\n");
+	}
+
+	/**
+	 * Generate an automatic symbol name, given a field number and address
+	 * @param fldNum is the field number
+	 * @param addr is the Address
+	 * @return the name
+	 */
+	public static String buildSymbolName(int fldNum, Address addr) {
+		StringBuilder buffer = new StringBuilder();
+		buffer.append(BASENAME).append(fldNum + 1).append('_');
+		buffer.append(Long.toHexString(addr.getOffset()));
+		return buffer.toString();
+	}
+
+	/**
+	 * The actual field number is encoded in the symbol name
+	 * @param nm is the symbol name
+	 * @return the field number or -1 if we cannot parse
+	 */
+	public static int extractFieldNumber(String nm) {
+		int pos = nm.indexOf(BASENAME);
+		if (pos < 0) {
+			return -1;
+		}
+		int endpos = nm.indexOf('_', pos);
+		if (endpos < 0) {
+			return -1;
+		}
+		return Integer.decode(nm.substring(pos + BASENAME.length(), endpos)) - 1;
+	}
+}

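A round-trip sketch for the name encoding (illustrative, not part of this commit; addr is an assumed Address at offset 0x401000). The field ordinal rides along in the generated name, stored as ordinal + 1, and is parsed back out later:

	String name = UnionFacetSymbol.buildSymbolName(2, addr);   // "unionfacet3_401000"
	int fieldNum = UnionFacetSymbol.extractFieldNumber(name);  // 2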