Mirror of https://github.com/NationalSecurityAgency/ghidra.git

Commit 1d73296e07: Merge remote-tracking branch 'origin/caheckman_FillOutStructure'
4 changed files with 537 additions and 117 deletions
FillOutStructureCmd.java:

@@ -16,6 +16,7 @@
 package ghidra.app.plugin.core.decompile.actions;
 
 import java.util.*;
+import java.util.Map.Entry;
 
 import ghidra.app.cmd.label.RenameLabelCmd;
 import ghidra.app.decompiler.*;
@@ -45,6 +46,7 @@ import ghidra.util.task.TaskMonitor;
  */
 public class FillOutStructureCmd extends BackgroundCommand {
+
 
 	/**
 	 * Varnode with data-flow traceable to original pointer
 	 */
@@ -61,11 +63,10 @@ public class FillOutStructureCmd extends BackgroundCommand {
 	private static final String DEFAULT_BASENAME = "astruct";
 	private static final String DEFAULT_CATEGORY = "/auto_structs";
 
-	private long maxOffset = 0;
 	private int currentCallDepth = 0; // Current call depth (from root function)
 	private int maxCallDepth = 1;
 
-	private HashMap<Long, DataType> offsetToDataTypeMap = new HashMap<>();
+	private NoisyStructureBuilder componentMap = new NoisyStructureBuilder();
 	private HashMap<Address, Integer> addressToCallInputMap = new HashMap<>();
 
 	private Program currentProgram;
@@ -136,10 +137,15 @@ public class FillOutStructureCmd extends BackgroundCommand {
 
 		boolean isThisParam =
 			CreateStructureVariableAction.testForAutoParameterThis(var, rootFunction);
+		Structure structDT =
+			CreateStructureVariableAction.getStructureForExtending(var.getDataType());
+		if (structDT != null) {
+			componentMap.populateOriginalStructure(structDT);
+		}
 
 		fillOutStructureDef(var);
 
-		Structure structDT = createStructure(var, rootFunction, isThisParam);
+		structDT = createStructure(structDT, var, rootFunction, isThisParam);
 		populateStructure(structDT);
 
 		pushIntoCalls(structDT);
@@ -364,18 +370,17 @@ public class FillOutStructureCmd extends BackgroundCommand {
 	/**
 	 * Recover the structure associated with the given pointer variable, or if there is no structure,
 	 * create it. Resize the structure to be at least as large as the maxOffset seen so far.
+	 * @param structDT is the structure data-type to fill in, or null if a new Structure should be created
 	 * @param var is the given pointer variable
 	 * @param f is the function
 	 * @param isThisParam is true if the variable is a 'this' pointer
 	 * @return the Structure object
 	 */
-	private Structure createStructure(HighVariable var, Function f, boolean isThisParam) {
-
-		Structure structDT =
-			CreateStructureVariableAction.getStructureForExtending(var.getDataType());
+	private Structure createStructure(Structure structDT, HighVariable var, Function f,
+			boolean isThisParam) {
+
 		if (structDT == null) {
-			structDT = createNewStruct(var, (int) maxOffset, f, isThisParam);
+			structDT = createNewStruct(var, (int) componentMap.getSize(), f, isThisParam);
 		}
 		else {
 			int len;
@@ -385,8 +390,8 @@ public class FillOutStructureCmd extends BackgroundCommand {
 			else {
 				len = structDT.getLength();
 			}
-			if (maxOffset > len) {
-				structDT.growStructure((int) maxOffset - len);
+			if (componentMap.getSize() > len) {
+				structDT.growStructure((int) componentMap.getSize() - len);
 			}
 		}
 		return structDT;
@@ -398,10 +403,11 @@ public class FillOutStructureCmd extends BackgroundCommand {
 	 * @param structDT is the given structure
 	 */
 	private void populateStructure(Structure structDT) {
-		Iterator<Long> iterator = offsetToDataTypeMap.keySet().iterator();
+		Iterator<Entry<Long, DataType>> iterator = componentMap.iterator();
 		while (iterator.hasNext()) {
-			Long key = iterator.next();
-			DataType valDT = offsetToDataTypeMap.get(key);
+			Entry<Long, DataType> entry = iterator.next();
+			Long key = entry.getKey();
+			DataType valDT = entry.getValue();
 			if (key.intValue() < 0) {
 				// println(" BAD OFFSET : " + key.intValue());
 				continue;
@@ -413,7 +419,7 @@ public class FillOutStructureCmd extends BackgroundCommand {
 			}
 
 			try {
-				DataTypeComponent existing = structDT.getDataTypeAt(key.intValue());
+				DataTypeComponent existing = structDT.getComponentAt(key.intValue());
 				// try to preserve existing information.
 				String name = null;
 				String comment = null;
@@ -463,7 +469,8 @@ public class FillOutStructureCmd extends BackgroundCommand {
 		String structName = createUniqueStructName(var, DEFAULT_CATEGORY, DEFAULT_BASENAME);
 
 		StructureDataType dt =
-			new StructureDataType(new CategoryPath(DEFAULT_CATEGORY), structName, size);
+			new StructureDataType(new CategoryPath(DEFAULT_CATEGORY), structName, size,
+				f.getProgram().getDataTypeManager());
 		return dt;
 	}
 
@@ -508,9 +515,41 @@ public class FillOutStructureCmd extends BackgroundCommand {
 		return true;
 	}
 
+	/**
+	 * Get the data-type associated with a Varnode. If the Varnode is produce by a CAST p-code
+	 * op, take the most specific data-type between what it was cast from and cast to.
+	 * @param vn is the Varnode to get the data-type for
+	 * @return the data-type
+	 */
+	public static DataType getDataTypeTraceBackward(Varnode vn) {
+		DataType res = vn.getHigh().getDataType();
+		PcodeOp op = vn.getDef();
+		if (op != null && op.getOpcode() == PcodeOp.CAST) {
+			Varnode otherVn = op.getInput(0);
+			res = MetaDataType.getMostSpecificDataType(res, otherVn.getHigh().getDataType());
+		}
+		return res;
+	}
+
+	/**
+	 * Get the data-type associated with a Varnode. If the Varnode is input to a CAST p-code
+	 * op, take the most specific data-type between what it was cast from and cast to.
+	 * @param vn is the Varnode to get the data-type for
+	 * @return the data-type
+	 */
+	public static DataType getDataTypeTraceForward(Varnode vn) {
+		DataType res = vn.getHigh().getDataType();
+		PcodeOp op = vn.getLoneDescend();
+		if (op != null && op.getOpcode() == PcodeOp.CAST) {
+			Varnode otherVn = op.getOutput();
+			res = MetaDataType.getMostSpecificDataType(res, otherVn.getHigh().getDataType());
+		}
+		return res;
+	}
+
 	/**
 	 * Look for Varnode references that are equal to the given variable plus a
-	 * constant offset and store them in the offsetToDataTypeMap. The search is performed
+	 * constant offset and store them in the componentMap. The search is performed
 	 * by following data-flow paths starting at the given variable. If the variable flows
 	 * into a CALL instruction, put it in the addressToCallInputMap if offset is 0.
 	 * @param var is the given variable
@@ -521,6 +560,13 @@ public class FillOutStructureCmd extends BackgroundCommand {
 		HashSet<Varnode> doneList = new HashSet<>();
 
 		todoList.add(new PointerRef(startVN, 0)); // Base Varnode on the todo list
+		Varnode[] instances = var.getInstances();
+		for (Varnode vn : instances) {
+			doneList.add(vn); // Mark instances as done to avoid recursion issues
+			if (vn != startVN) {
+				todoList.add(new PointerRef(startVN, 0)); // Make sure all instances are on the todo list
+			}
+		}
 
 		// while Todo list not empty
 		while (!todoList.isEmpty()) {
@@ -529,104 +575,102 @@ public class FillOutStructureCmd extends BackgroundCommand {
 				continue;
 			}
 
-			Varnode[] instances = currentRef.varnode.getHigh().getInstances();
-			// println("");
-			for (Varnode iVn : instances) {
-				Iterator<PcodeOp> descendants = iVn.getDescendants();
-				while (descendants.hasNext()) {
-					PcodeOp pcodeOp = descendants.next();
-					Varnode output = pcodeOp.getOutput();
-					Varnode[] inputs = pcodeOp.getInputs();
-					// println("off=" + offset + " " + pcodeOp.getSeqnum().getTarget().toString() + " : "
-					// + pcodeOp.toString());
-
-					DataType outDt;
-					long newOff;
-					switch (pcodeOp.getOpcode()) {
-						case PcodeOp.INT_SUB:
-						case PcodeOp.INT_ADD:
-							if (!inputs[1].isConstant()) {
-								break;
-							}
-							long value = getSigned(inputs[1]);
-							newOff = currentRef.offset +
-								((pcodeOp.getOpcode() == PcodeOp.INT_ADD) ? value : (-value));
-							if (sanityCheck(newOff)) { // should this offset create a location in the structure?
-								putOnList(output, newOff, todoList, doneList);
-								maxOffset = computeMax(maxOffset, newOff, 0);
-							}
-							break;
-						case PcodeOp.PTRADD:
-							if (!inputs[1].isConstant() || !inputs[2].isConstant()) {
-								break;
-							}
-							newOff =
-								currentRef.offset + getSigned(inputs[1]) * inputs[2].getOffset();
-							if (sanityCheck(newOff)) { // should this offset create a location in the structure?
-								putOnList(output, newOff, todoList, doneList);
-								maxOffset = computeMax(maxOffset, newOff, 0);
-							}
-							break;
-						case PcodeOp.PTRSUB:
-							if (!inputs[1].isConstant()) {
-								break;
-							}
-							long subOff = currentRef.offset + getSigned(inputs[1]);
-							if (sanityCheck(subOff)) { // should this offset create a location in the structure?
-								putOnList(output, subOff, todoList, doneList);
-								maxOffset = computeMax(maxOffset, subOff, 0);
-							}
-							break;
-						case PcodeOp.SEGMENTOP:
-							// treat segment op as if it were a cast to complete the value
-							// The segment adds in some unknown base value.
-							// get output and add to the Varnode Todo list
-							putOnList(output, currentRef.offset, todoList, doneList);
-							break;
-
-						case PcodeOp.LOAD:
-							outDt = output.getHigh().getDataType();
-							if (outDt != null) {
-								offsetToDataTypeMap.put(Long.valueOf(currentRef.offset), outDt);
-							}
-							maxOffset = computeMax(maxOffset, currentRef.offset, output.getSize());
-							break;
-						case PcodeOp.STORE:
-							// create a location in the struct
-							//use the type of the varnode being put in to the structure
-							if (pcodeOp.getSlot(iVn) != 1) {
-								break; // store must be into the target structure
-							}
-							outDt = inputs[2].getHigh().getDataType();
-							int outLen = 1; // Storing at least one byte
-							if (outDt != null) {
-								offsetToDataTypeMap.put(Long.valueOf(currentRef.offset), outDt);
-								outLen = outDt.getLength();
-							}
-
-							maxOffset = computeMax(maxOffset, currentRef.offset, outLen);
-							break;
-						case PcodeOp.CAST:
-							putOnList(output, currentRef.offset, todoList, doneList);
-							break;
-						case PcodeOp.MULTIEQUAL:
-							putOnList(output, currentRef.offset, todoList, doneList);
-							break;
-						case PcodeOp.COPY:
-							putOnList(output, currentRef.offset, todoList, doneList);
-							break;
-						case PcodeOp.CALL:
-							if (currentRef.offset == 0) { // If pointer is passed directly (no offset)
-								// find it as an input
-								int slot = pcodeOp.getSlot(iVn);
-								if (slot > 0 && slot < pcodeOp.getNumInputs()) {
-									putOnCallParamList(pcodeOp.getInput(0).getAddress(), slot - 1);
-								}
-							}
-							break;
-					}
-				}
-			}
+			Iterator<PcodeOp> descendants = currentRef.varnode.getDescendants();
+			while (descendants.hasNext()) {
+				PcodeOp pcodeOp = descendants.next();
+				Varnode output = pcodeOp.getOutput();
+				Varnode[] inputs = pcodeOp.getInputs();
+				// println("off=" + offset + " " + pcodeOp.getSeqnum().getTarget().toString() + " : "
+				// + pcodeOp.toString());
+
+				DataType outDt;
+				long newOff;
+				switch (pcodeOp.getOpcode()) {
+					case PcodeOp.INT_SUB:
+					case PcodeOp.INT_ADD:
+						if (!inputs[1].isConstant()) {
+							break;
+						}
+						long value = getSigned(inputs[1]);
+						newOff = currentRef.offset +
+							((pcodeOp.getOpcode() == PcodeOp.INT_ADD) ? value : (-value));
+						if (sanityCheck(newOff)) { // should this offset create a location in the structure?
+							putOnList(output, newOff, todoList, doneList);
+							// Don't do componentMap.addDataType() as data-type info here is likely uninformed
+							componentMap.setMinimumSize(newOff);
+						}
+						break;
+					case PcodeOp.PTRADD:
+						if (!inputs[1].isConstant() || !inputs[2].isConstant()) {
+							break;
+						}
+						newOff =
+							currentRef.offset + getSigned(inputs[1]) * inputs[2].getOffset();
+						if (sanityCheck(newOff)) { // should this offset create a location in the structure?
+							putOnList(output, newOff, todoList, doneList);
+							// Don't do componentMap.addReference() as data-type info here is likely uninformed
+							componentMap.setMinimumSize(newOff);
+						}
+						break;
+					case PcodeOp.PTRSUB:
+						if (!inputs[1].isConstant()) {
+							break;
+						}
+						long subOff = currentRef.offset + getSigned(inputs[1]);
+						if (sanityCheck(subOff)) { // should this offset create a location in the structure?
+							putOnList(output, subOff, todoList, doneList);
+							// Don't do componentMap.addReference() as data-type info here is likely uninformed
+							componentMap.setMinimumSize(subOff);
+						}
+						break;
+					case PcodeOp.SEGMENTOP:
+						// treat segment op as if it were a cast to complete the value
+						// The segment adds in some unknown base value.
+						// get output and add to the Varnode Todo list
+						putOnList(output, currentRef.offset, todoList, doneList);
+						componentMap.setMinimumSize(currentRef.offset);
+						break;
+					case PcodeOp.LOAD:
+						outDt = getDataTypeTraceForward(output);
+						componentMap.addDataType(currentRef.offset, outDt);
+						break;
+					case PcodeOp.STORE:
+						// create a location in the struct
+						//use the type of the varnode being put in to the structure
+						if (pcodeOp.getSlot(currentRef.varnode) != 1) {
+							break; // store must be into the target structure
+						}
+						outDt = getDataTypeTraceBackward(inputs[2]);
+						componentMap.addDataType(currentRef.offset, outDt);
+						break;
+					case PcodeOp.CAST:
+						putOnList(output, currentRef.offset, todoList, doneList);
+						break;
+					case PcodeOp.MULTIEQUAL:
+						putOnList(output, currentRef.offset, todoList, doneList);
+						break;
+					case PcodeOp.COPY:
+						putOnList(output, currentRef.offset, todoList, doneList);
+						break;
+					case PcodeOp.CALL:
+						if (currentRef.offset == 0) { // If pointer is passed directly (no offset)
+							// find it as an input
+							int slot = pcodeOp.getSlot(currentRef.varnode);
+							if (slot > 0 && slot < pcodeOp.getNumInputs()) {
+								putOnCallParamList(inputs[0].getAddress(), slot - 1);
+							}
+						}
+						else {
+							outDt = getDataTypeTraceBackward(currentRef.varnode);
+							componentMap.addReference(currentRef.offset, outDt);
+						}
+						break;
+					case PcodeOp.CALLIND:
+						outDt = getDataTypeTraceBackward(currentRef.varnode);
+						componentMap.addReference(currentRef.offset, outDt);
+						break;
+				}
+			}
 		}
 	}
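The INT_SUB/INT_ADD, PTRADD, and PTRSUB cases above only adjust the running offset carried by each PointerRef before the result is fed to sanityCheck() and componentMap.setMinimumSize(). A self-contained sketch of that offset arithmetic, with plain long constants standing in for the Varnode inputs (illustration only, not Ghidra API code):

public final class OffsetPropagationSketch {
	// INT_ADD / INT_SUB: add or subtract the constant second input
	static long afterIntAddOrSub(long currentOffset, long constant, boolean isAdd) {
		return currentOffset + (isAdd ? constant : -constant);
	}

	// PTRADD: pointer + index * elementSize (both inputs must be constant)
	static long afterPtrAdd(long currentOffset, long index, long elementSize) {
		return currentOffset + index * elementSize;
	}

	// PTRSUB: pointer + constant, the typical field-access form
	static long afterPtrSub(long currentOffset, long fieldOffset) {
		return currentOffset + fieldOffset;
	}

	public static void main(String[] args) {
		long off = 0;
		off = afterPtrAdd(off, 3, 8);           // ptr + 3*8   -> offset 24
		off = afterPtrSub(off, 4);              // field at +4 -> offset 28
		off = afterIntAddOrSub(off, 8, false);  // minus 8     -> offset 20
		System.out.println(off);                // prints 20
	}
}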
@@ -641,13 +685,6 @@ public class FillOutStructureCmd extends BackgroundCommand {
 		addressToCallInputMap.put(address, j);
 	}
 
-	private long computeMax(long max, long newOff, int length) {
-		if (max < (newOff + length)) {
-			max = newOff + length;
-		}
-		return max;
-	}
-
 	private long getSigned(Varnode varnode) {
 		long mask = 0x80L << ((varnode.getSize() - 1) * 8);
 		long value = varnode.getOffset();
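getSigned() recovers a signed value from a constant Varnode using a size-dependent sign-bit mask; the rest of the method is cut off by the hunk above. A worked sketch of what that mask accomplishes, where the subtraction step is an assumed completion standing in for the elided lines, not the committed code:

public final class GetSignedSketch {
	// value: the varnode's raw (zero-extended) constant, size: varnode size in bytes
	static long getSigned(long value, int size) {
		long mask = 0x80L << ((size - 1) * 8);  // sign bit for this size, as in the hunk above
		if ((value & mask) != 0) {
			value -= (mask << 1);               // assumed completion: fold into the negative range
		}
		return value;
	}

	public static void main(String[] args) {
		System.out.println(getSigned(0xFFFFFFFCL, 4)); // prints -4
		System.out.println(getSigned(0x7CL, 1));       // prints 124
	}
}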
MetaDataType.java (new file):

@@ -0,0 +1,110 @@
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.program.model.data;

public enum MetaDataType {
	// Enumerations are ordered in terms of how "specific" the data-type class is
	VOID,		// "void" data-type
	UNKNOWN,	// An unknown/undefined data-type
	INT,		// Signed integer
	UINT,		// Unsigned integer
	BOOL,		// Boolean
	CODE,		// Executable code
	FLOAT,		// Floating-point
	PTR,		// Pointer
	ARRAY,		// Array
	STRUCT;		// Structured data-type

	public static MetaDataType getMeta(DataType dt) {
		if (dt instanceof TypeDef) {
			dt = ((TypeDef) dt).getBaseDataType();
		}
		if (dt instanceof DefaultDataType || dt instanceof Undefined) {
			return UNKNOWN;
		}
		if (dt instanceof AbstractIntegerDataType) {
			if (dt instanceof BooleanDataType) {
				return BOOL;
			}
			if (((AbstractIntegerDataType) dt).isSigned()) {
				return INT;
			}
			return UINT;
		}
		if (dt instanceof Pointer) {
			return PTR;
		}
		if (dt instanceof Array) {
			return ARRAY;
		}
		if (dt instanceof Structure) {
			return STRUCT;
		}
		if (dt instanceof AbstractFloatDataType) {
			return FLOAT;
		}
		if (dt instanceof ArrayStringable) {
			return INT;
		}
		if (dt instanceof FunctionDefinition) {
			return CODE;
		}
		if (dt instanceof Enum) {
			return UINT;
		}
		if (dt instanceof AbstractStringDataType) {
			return ARRAY;
		}
		return STRUCT;
	}

	public static DataType getMostSpecificDataType(DataType a, DataType b) {
		if (a == null) {
			return b;
		}
		if (b == null) {
			return a;
		}
		DataType aCopy = a;
		DataType bCopy = b;
		for (;;) {
			MetaDataType aMeta = MetaDataType.getMeta(a);
			MetaDataType bMeta = MetaDataType.getMeta(b);
			int compare = aMeta.compareTo(bMeta);
			if (compare < 0) {
				return bCopy;
			}
			else if (compare > 0) {
				return aCopy;
			}
			if (aMeta == MetaDataType.PTR) {
				a = ((Pointer) a).getDataType();
				b = ((Pointer) b).getDataType();
			}
			else if (aMeta == MetaDataType.ARRAY) {
				if (!(a instanceof Array) || !(b instanceof Array)) {
					break;
				}
				a = ((Array) a).getDataType();
				b = ((Array) b).getDataType();
			}
			else {
				break;
			}
		}
		return aCopy;
	}
}
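A small illustration of how the enum ordering drives getMostSpecificDataType(), reusing data-types that the NoisyStructureBuilderTest below also exercises; it assumes Ghidra's ghidra.program.model.data classes are on the classpath:

import ghidra.program.model.data.*;

public class MetaDataTypeExample {
	public static void main(String[] args) {
		DataType undef = Undefined4DataType.dataType;  // getMeta() -> UNKNOWN
		DataType dword = DWordDataType.dataType;       // getMeta() -> UINT
		Pointer ptr = new Pointer32DataType(dword);    // getMeta() -> PTR

		// UNKNOWN orders below UINT, so the defined integer wins
		System.out.println(MetaDataType.getMostSpecificDataType(undef, dword).getName());

		// UINT orders below PTR, so the pointer wins
		System.out.println(MetaDataType.getMostSpecificDataType(dword, ptr).getName());
	}
}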

NoisyStructureBuilder.java (new file):

@@ -0,0 +1,172 @@
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.program.model.data;

import java.util.Iterator;
import java.util.Map.Entry;
import java.util.TreeMap;

/**
 * Build a structure from a "noisy" source of field information.
 * Feed it field records, either via addDataType(), when we
 * have more definitive info about the size of the field, or via addReference()
 * when we have a pointer reference to the field with possibly less info about the field size.
 *
 * As records come in, overlaps and conflicts in specific field data-types are resolved.
 * In a conflict, less specific data-types are replaced.
 * After all information is collected a final Structure can be built by iterating over
 * the final field entries.
 */
public class NoisyStructureBuilder {
	private TreeMap<Long, DataType> offsetToDataTypeMap = new TreeMap<Long, DataType>();
	private Structure structDT = null;
	private long sizeOfStruct = 0;

	private void computeMax(long newOff, int length) {
		if (sizeOfStruct < (newOff + length)) {
			sizeOfStruct = newOff + length;
		}
	}

	/**
	 * Check if the given range overlaps any existing field entries. If it does
	 * return the first entry, otherwise return null.
	 * @param offset is the starting of the range (in bytes)
	 * @param size is the number of bytes in the range
	 * @return the first overlapping entry or null
	 */
	private Entry<Long, DataType> checkForOverlap(long offset, int size) {
		Entry<Long, DataType> res = offsetToDataTypeMap.floorEntry(offset);
		if (res != null) {
			long last = res.getKey().longValue() + res.getValue().getLength();
			if (offset < last) {
				return res;
			}
		}
		res = offsetToDataTypeMap.higherEntry(offset);
		if (res != null) {
			long last = offset + size;
			if (res.getKey() < last) {
				return res;
			}
		}
		return null;
	}

	/**
	 * @return the size of the structure in bytes (given current information)
	 */
	public long getSize() {
		return sizeOfStruct;
	}

	/**
	 * Add data-type information about a specific field
	 * @param offset of the field within the structure
	 * @param dt is the data-type of field if known (null otherwise)
	 */
	public void addDataType(long offset, DataType dt) {
		if (dt == null) {
			computeMax(offset, 1);
			return;
		}
		computeMax(offset, dt.getLength());
		Entry<Long, DataType> firstEntry = checkForOverlap(offset, dt.getLength());
		if (firstEntry != null) {
			if (firstEntry.getKey().longValue() == offset &&
				firstEntry.getValue().getLength() == dt.getLength()) {
				// Matching field, compare the data-types
				if (dt != MetaDataType.getMostSpecificDataType(firstEntry.getValue(), dt)) {
					return;
				}
			}
			else if (firstEntry.getKey().longValue() <= offset &&
				offset + dt.getLength() < firstEntry.getKey().longValue() +
					firstEntry.getValue().getLength()) {
				// Completely contained within preexisting entry
				if (!(firstEntry.getValue() instanceof Undefined)) {
					// Don't override preexisting entry with a smaller one
					return;
				}
				// unless the preexising entry is undefined
			}
			else if (dt instanceof Undefined) {
				// The new field either fully or partially contains preexisting fields
				return;
			}
			offsetToDataTypeMap.subMap(firstEntry.getKey(), offset + dt.getLength()).clear(); // Clear overlapping entries
		}
		offsetToDataTypeMap.put(Long.valueOf(offset), dt);
	}

	/**
	 * Adds information for a field given a pointer reference.
	 * The data-type information is not used unless it is a pointer.
	 * @param offset is the offset of the field within the structure
	 * @param dt is the data-type of the pointer to the field (or null)
	 */
	public void addReference(long offset, DataType dt) {
		if (dt != null && dt instanceof Pointer) {
			dt = ((Pointer) dt).getDataType();
			if (dt.equals(structDT)) {
				return; // Don't allow structure to contain itself
			}
			if (dt instanceof Structure) {
				if (((Structure) dt).getNumDefinedComponents() == 0) {
					computeMax(offset, 1);
					return;
				}
			}
			addDataType(offset, dt);
		}
		else {
			computeMax(offset, 1);
		}
	}

	/**
	 * We may have partial information about the size of the structure. This method feeds it to the
	 * builder as a minimum size for the structure.
	 * @param size is the minimum size in bytes
	 */
	public void setMinimumSize(long size) {
		if (size > sizeOfStruct) {
			sizeOfStruct = size;
		}
	}

	/**
	 * @return an iterator to the current field entries
	 */
	public Iterator<Entry<Long, DataType>> iterator() {
		return offsetToDataTypeMap.entrySet().iterator();
	}

	/**
	 * Populate this builder with fields from a preexisting Structure.
	 * The builder presumes it is rebuilding this Structure so it can check for
	 * pathological containment issues.
	 * @param dt is the preexisting Structure
	 */
	public void populateOriginalStructure(Structure dt) {
		structDT = dt;
		DataTypeComponent[] components = structDT.getDefinedComponents();
		for (DataTypeComponent component : components) {
			offsetToDataTypeMap.put((long) component.getOffset(), component.getDataType());
		}
		sizeOfStruct = structDT.getLength();
	}
}
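A minimal usage sketch of the builder, modeled on the unit test that follows; the specific offsets and data-types are illustrative only:

import java.util.Iterator;
import java.util.Map.Entry;

import ghidra.program.model.data.*;

public class NoisyStructureBuilderExample {
	public static void main(String[] args) {
		NoisyStructureBuilder builder = new NoisyStructureBuilder();

		// Definitive field observation, e.g. the data-type seen at a LOAD/STORE
		builder.addDataType(0, DWordDataType.dataType);

		// Weaker observation from a pointer reference; only the pointed-to type is used
		builder.addReference(8, new Pointer32DataType(ShortDataType.dataType));

		// An offset reached with no usable type information still grows the minimum size
		builder.setMinimumSize(16);

		Iterator<Entry<Long, DataType>> iter = builder.iterator();
		while (iter.hasNext()) {
			Entry<Long, DataType> field = iter.next();
			System.out.println("offset " + field.getKey() + " -> " + field.getValue().getName());
		}
		System.out.println("size >= " + builder.getSize()); // prints 16
	}
}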

NoisyStructureBuilderTest.java (new file):

@@ -0,0 +1,101 @@
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.program.model.data;

import java.util.Iterator;
import java.util.Map.Entry;

import org.junit.Assert;
import org.junit.Test;

import generic.test.AbstractGTest;

public class NoisyStructureBuilderTest extends AbstractGTest {

	public void testNextField(Iterator<Entry<Long, DataType>> iter, long offset, DataType dt) {
		Assert.assertTrue(iter.hasNext());
		Entry<Long, DataType> entry = iter.next();
		Assert.assertEquals(offset, entry.getKey().longValue());
		Assert.assertTrue(entry.getValue() == dt);
	}

	@Test
	public void testBasicFields() {
		NoisyStructureBuilder builder = new NoisyStructureBuilder();
		builder.addDataType(12, Undefined4DataType.dataType);
		Pointer ptr = new Pointer32DataType(DWordDataType.dataType);
		builder.addReference(4, ptr);
		builder.addDataType(18, ShortDataType.dataType);
		builder.addReference(21, null);

		Iterator<Entry<Long, DataType>> iter = builder.iterator();
		testNextField(iter, 4, DWordDataType.dataType);
		testNextField(iter, 12, Undefined4DataType.dataType);
		testNextField(iter, 18, ShortDataType.dataType);
		Assert.assertFalse(iter.hasNext());
		Assert.assertEquals(builder.getSize(), 22);

		builder.addDataType(12, DWordDataType.dataType);
		builder.addDataType(4, Undefined4DataType.dataType);

		iter = builder.iterator();
		testNextField(iter, 4, DWordDataType.dataType);
		testNextField(iter, 12, DWordDataType.dataType);
		testNextField(iter, 18, ShortDataType.dataType);
		Assert.assertFalse(iter.hasNext());
		Assert.assertEquals(builder.getSize(), 22);
	}

	@Test
	public void testOverlaps() {
		NoisyStructureBuilder builder = new NoisyStructureBuilder();
		builder.addDataType(0, DWordDataType.dataType);
		builder.addDataType(4, ShortDataType.dataType);
		builder.addDataType(0, Undefined8DataType.dataType);
		Assert.assertEquals(builder.getSize(), 8);
		Iterator<Entry<Long, DataType>> iter = builder.iterator();
		testNextField(iter, 0, DWordDataType.dataType);
		testNextField(iter, 4, ShortDataType.dataType);
		Assert.assertFalse(iter.hasNext());
		Assert.assertEquals(builder.getSize(), 8); // Undefined8 should expand size even though field isn't taken

		builder.addDataType(0, QWordDataType.dataType); // Should replace everything
		iter = builder.iterator();
		testNextField(iter, 0, QWordDataType.dataType);
		Assert.assertFalse(iter.hasNext());
		Pointer ptr = new Pointer32DataType(DWordDataType.dataType);
		builder.addDataType(6, ptr); // Partial overlap, should replace existing
		iter = builder.iterator();
		testNextField(iter, 6, ptr);
		Assert.assertFalse(iter.hasNext());
		Assert.assertEquals(builder.getSize(), 10);

		builder.addDataType(4, DWordDataType.dataType); // Partial overlap, should replace
		iter = builder.iterator();
		testNextField(iter, 4, DWordDataType.dataType);
		Assert.assertFalse(iter.hasNext());

		builder = new NoisyStructureBuilder();
		builder.addDataType(4, Undefined8DataType.dataType);
		builder.addDataType(4, Undefined4DataType.dataType);
		builder.addDataType(8, DWordDataType.dataType);
		builder.addDataType(8, SignedDWordDataType.dataType); // Less specific data-type
		iter = builder.iterator();
		testNextField(iter, 4, Undefined4DataType.dataType);
		testNextField(iter, 8, DWordDataType.dataType);
		Assert.assertFalse(iter.hasNext());
	}
}