Adjustments to data-type propagation through INT_ADD
This commit is contained in:
parent 7f2ef251e1
commit 58558981d5
11 changed files with 257 additions and 125 deletions

@@ -231,7 +231,7 @@ Datatype *CastStrategyC::castStandard(Datatype *reqtype,Datatype *curtype,
care_uint_int = true;
isptr = true;
}
if (curtype == reqtype) return (Datatype *)0; // Different typedefs could point to the same type
if (curbase == reqbase) return (Datatype *)0; // Different typedefs could point to the same type
if ((reqbase->getMetatype()==TYPE_VOID)||(curtype->getMetatype()==TYPE_VOID))
return (Datatype *)0; // Don't cast from or to VOID
if (reqbase->getSize() != curbase->getSize()) {
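
(For intuition: with this change, a value whose type is, say, a typedef of uint4 that is passed where plain uint4 is expected no longer draws an explicit cast, since the comparison is now made on the resolved base types rather than on the Datatype objects themselves. The typedef example is only an illustration, not something taken from this commit.)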

@@ -76,7 +76,7 @@ void StackSolver::propagate(int4 varnum,int4 val)
while(!workstack.empty()) {
varnum = workstack.back();
workstack.pop_back();

eqn.var1 = varnum;
top = lower_bound(eqs.begin(),eqs.end(),eqn,StackEqn::compare);
while((top!=eqs.end())&&((*top).var1 == varnum)) {

@@ -212,7 +212,7 @@ void StackSolver::build(const Funcdata &data,AddrSpace *id,int4 spcbase)
}
}
}

eqn.rhs = 4; // Otherwise make a guess
guess.push_back(eqn);
}

@@ -274,7 +274,7 @@ void ActionStackPtrFlow::analyzeExtraPop(Funcdata &data,AddrSpace *stackspace,in
}
if (solver.getNumVariables() == 0) return;
solver.solve(); // Solve the equations

Varnode *invn = solver.getVariable(0);
bool warningprinted = false;

@@ -622,7 +622,7 @@ int4 ActionSegmentize::apply(Funcdata &data)
vector<Varnode *> bindlist;
bindlist.push_back((Varnode *)0);
bindlist.push_back((Varnode *)0);

for(int4 i=0;i<numops;++i) {
SegmentOp *segdef = data.getArch()->userops.getSegmentOp(i);
if (segdef == (SegmentOp *)0) continue;

@@ -703,7 +703,7 @@ int4 ActionConstbase::apply(Funcdata &data)
// PcodeOp *op;
// list<PcodeOp *>::const_iterator iter;
// uintm hash;

// for(iter=data.op_alive_begin();iter!=data.op_alive_end();++iter) {
// op = *iter;
// hash = op->getCseHash();

@@ -1384,7 +1384,7 @@ int4 ActionExtraPopSetup::apply(Funcdata &data)
const VarnodeData &point(stackspace->getSpacebase(0));
Address sb_addr(point.space,point.offset);
int4 sb_size = point.size;

for(int4 i=0;i<data.numCalls();++i) {
fc = data.getCallSpecs(i);
if (fc->getExtraPop() == 0) continue; // Stack pointer is undisturbed

@@ -1552,7 +1552,7 @@ int4 ActionParamDouble::apply(Funcdata &data)
data.opSetInput(op,mostvn,slot+1);
}
count += 1; // Indicate that a change was made

j -= 1; // Note we decrement j here, so that we can check nested CONCATs
}
}

@@ -1573,7 +1573,7 @@ int4 ActionParamDouble::apply(Funcdata &data)
else if (whole.inHandLo(vn1)) {
if (whole.getHi() != vn2) continue;
isslothi = false;
}
}
else
continue;
if (fc->checkInputJoin(j,isslothi,vn1,vn2)) {

@@ -1838,7 +1838,7 @@ int4 ActionReturnRecovery::apply(Funcdata &data)
Varnode *vn;
list<PcodeOp *>::const_iterator iter,iterend;
int4 i;

int4 maxancestor = data.getArch()->trim_recurse_max;
iterend = data.endOp(CPUI_RETURN);
AncestorRealistic ancestorReal;

@@ -1861,7 +1861,7 @@ int4 ActionReturnRecovery::apply(Funcdata &data)
active->finishPass();
if (active->getNumPasses() > active->getMaxPass())
active->markFullyChecked();

if (active->isFullyChecked()) {
data.getFuncProto().deriveOutputMap(active);
iterend = data.endOp(CPUI_RETURN);

@@ -1887,7 +1887,7 @@ int4 ActionRestrictLocal::apply(Funcdata &data)
Varnode *vn;
int4 i;
vector<EffectRecord>::const_iterator eiter,endeiter;

for(i=0;i<data.numCalls();++i) {
fc = data.getCallSpecs(i);
op = fc->getOp();

@@ -2126,7 +2126,7 @@ int4 ActionRestructureHigh::apply(Funcdata &data)
l1->restructureHigh();
if (data.syncVarnodesWithSymbols(l1,true))
count += 1;

#ifdef OPACTION_DEBUG
if ((flags&rule_debug)==0) return 0;
l1->turnOffDebug();

@@ -2151,7 +2151,7 @@ int4 ActionDefaultParams::apply(Funcdata &data)
fc = data.getCallSpecs(i);
if (!fc->hasModel()) {
Funcdata *otherfunc = fc->getFuncdata();

if (otherfunc != (Funcdata *)0) {
fc->copy(otherfunc->getFuncProto());
if ((!fc->isModelLocked())&&(!fc->hasMatchingModel(evalfp)))

@@ -2165,6 +2165,36 @@ int4 ActionDefaultParams::apply(Funcdata &data)
return 0; // Indicate success
}

/// \brief Test if the given cast conflict can be resolved by passing to the first structure field
///
/// Test if the given Varnode data-type is a pointer to a structure and if interpreting
/// the data-type as a pointer to the structure's first field will get it to match the
/// desired data-type.
/// \param vn is the given Varnode
/// \param ct is the desired data-type
/// \param castStrategy is used to determine if the data-types are compatible
/// \return \b true if a pointer to the first field makes sense
bool ActionSetCasts::testStructOffset0(Varnode *vn,Datatype *ct,CastStrategy *castStrategy)

{
if (ct->getMetatype() != TYPE_PTR) return false;
Datatype *highType = vn->getHigh()->getType();
if (highType->getMetatype() != TYPE_PTR) return false;
Datatype *highPtrTo = ((TypePointer *)highType)->getPtrTo();
if (highPtrTo->getMetatype() != TYPE_STRUCT) return false;
TypeStruct *highStruct = (TypeStruct *)highPtrTo;
if (highStruct->numDepend() == 0) return false;
vector<TypeField>::const_iterator iter = highStruct->beginField();
if ((*iter).offset != 0) return false;
Datatype *reqtype = ((TypePointer *)ct)->getPtrTo();
Datatype *curtype = (*iter).type;
if (reqtype->getMetatype() == TYPE_ARRAY)
reqtype = ((TypeArray *)reqtype)->getBase();
if (curtype->getMetatype() == TYPE_ARRAY)
curtype = ((TypeArray *)curtype)->getBase();
return (castStrategy->castStandard(reqtype, curtype, true, true) == (Datatype *)0);
}

/// \brief Insert cast to output Varnode type after given PcodeOp if it is necessary
///
/// \param op is the given PcodeOp

@@ -2229,7 +2259,7 @@ int4 ActionSetCasts::castInput(PcodeOp *op,int4 slot,Funcdata &data,CastStrategy
{
Datatype *ct;
Varnode *vn;
Varnode *vn,*vnout;
PcodeOp *newop;

ct = op->getOpcode()->getInputCast(op,slot,castStrategy); // Input type expected by this operation

@@ -2253,16 +2283,29 @@ int4 ActionSetCasts::castInput(PcodeOp *op,int4 slot,Funcdata &data,CastStrategy
if (vn->getType() == ct)
return 1;
}
else if (testStructOffset0(vn, ct, castStrategy)) {
// Insert a PTRSUB(vn,#0) instead of a CAST
newop = data.newOp(2,op->getAddr());
vnout = data.newUniqueOut(vn->getSize(), newop);
vnout->updateType(ct,false,false);
vnout->setImplied();
data.opSetOpcode(newop, CPUI_PTRSUB);
data.opSetInput(newop,vn,0);
data.opSetInput(newop,data.newConstant(4, 0),1);
data.opSetInput(op,vnout,slot);
data.opInsertBefore(newop,op);
return 1;
}
newop = data.newOp(1,op->getAddr());
vn = data.newUniqueOut(op->getIn(slot)->getSize(),newop);
vn->updateType(ct,false,false);
vn->setImplied();
vnout = data.newUniqueOut(vn->getSize(),newop);
vnout->updateType(ct,false,false);
vnout->setImplied();
#ifdef CPUI_STATISTICS
data.getArch()->stats->countCast();
#endif
data.opSetOpcode(newop,CPUI_CAST);
data.opSetInput(newop,op->getIn(slot),0);
data.opSetInput(op,vn,slot);
data.opSetInput(newop,vn,0);
data.opSetInput(op,vnout,slot);
data.opInsertBefore(newop,op); // Cast comes AFTER operation
return 1;
}
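
As a rough source-level illustration of what the PTRSUB(vn,#0) path buys (hypothetical types and function names, not taken from this commit or the Ghidra sources): a pointer to a structure can now be handed to code expecting a pointer to the structure's first field without an explicit cast, because a PTRSUB with offset 0 prints as a reference to that field.

// Illustration only; every name below is invented.
struct heap_block {
  int length;                      // first field, at offset 0
  int flags;
};

static void clear_length(int *p) { *p = 0; }

void demo(heap_block *blk) {
  // Without testStructOffset0 the decompiler has to render the argument as
  //   clear_length((int *)blk);
  // With the PTRSUB(blk,#0) form it can print the first field instead:
  clear_length(&blk->length);
}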

@@ -2614,7 +2657,7 @@ int4 ActionMarkExplicit::baseExplicit(Varnode *vn,int4 maxref)
desccount += 1;
if (desccount > maxref) return -1; // Must not exceed max descendants
}

return desccount;
}

@@ -3010,7 +3053,7 @@ int4 ActionDoNothing::apply(Funcdata &data)
int4 i;
const BlockGraph &graph(data.getBasicBlocks());
BlockBasic *bb;

for(i=0;i<graph.getSize();++i) {
bb = (BlockBasic *) graph.getBlock(i);
if (bb->isDoNothing()) {

@@ -3825,7 +3868,7 @@ int4 ActionPrototypeTypes::apply(Funcdata &data)
data.opSetInput(op,vn,0);
}
}

if (data.getFuncProto().isOutputLocked()) {
ProtoParameter *outparam = data.getFuncProto().getOutput();
if (outparam->getType()->getMetatype() != TYPE_VOID) {

@@ -3877,7 +3920,7 @@ int4 ActionPrototypeTypes::apply(Funcdata &data)
BlockBasic *topbl = (BlockBasic *)0;
if (data.getBasicBlocks().getSize() > 0)
topbl = (BlockBasic *)data.getBasicBlocks().getBlock(0);

int4 numparams = data.getFuncProto().numParams();
for(i=0;i<numparams;++i) {
ProtoParameter *param = data.getFuncProto().getParam(i);

@@ -4087,7 +4130,7 @@ int4 ActionPrototypeWarnings::apply(Funcdata &data)
if (ourproto.hasOutputErrors()) {
data.warningHeader("Cannot assign location of return value for this function: Return value may be inaccurate");
}
if (ourproto.isUnknownModel() && (!ourproto.hasCustomStorage()) &&
if (ourproto.isUnknownModel() && (!ourproto.hasCustomStorage()) &&
(ourproto.isInputLocked() || ourproto.isOutputLocked())) {
data.warningHeader("Unknown calling convention yet parameter storage is locked");
}

@@ -4201,31 +4244,64 @@ bool ActionInferTypes::writeBack(Funcdata &data)
/// Determine if the given data-type edge looks like a pointer
/// propagating through an "add a constant" operation. We assume the input
/// Varnode has a pointer data-type.
/// to the edge has a pointer data-type. This routine returns one of the commands:
/// - 0 indicates this is "add a constant" and the constant is passed back
/// - 1 indicates the pointer does not propagate through
/// - 2 the input data-type propagates through untransformed
///
/// \param off passes back the constant offset if the command is '0'
/// \param op is the PcodeOp propagating the data-type
/// \param slot is the input edge being propagated
/// \return the offset of the added constant or -1 if not a pointer add operation
int4 ActionInferTypes::propagateAddPointer(PcodeOp *op,int4 slot)

/// \param sz is the size of the data-type being pointed to
/// \return a command indicating how the op should be treated
int4 ActionInferTypes::propagateAddPointer(uintb &off,PcodeOp *op,int4 slot,int4 sz)

{
if ((op->code() == CPUI_PTRADD)&&(slot==0))
return op->getIn(2)->getOffset();
if ((op->code() == CPUI_PTRSUB)&&(slot==0))
return op->getIn(1)->getOffset();
if (op->code() == CPUI_PTRADD) {
if (slot != 0) return 1;
Varnode *constvn = op->getIn(1);
uintb mult = op->getIn(2)->getOffset();
if (constvn->isConstant()) {
off = (constvn->getOffset() * mult) & calc_mask(constvn->getSize());
return 0;
}
if (sz != 0 && (mult % sz) != 0)
return 1;
return 2;
}
if (op->code() == CPUI_PTRSUB) {
if (slot != 0) return 1;
off = op->getIn(1)->getOffset();
return 0;
}
if (op->code() == CPUI_INT_ADD) {
Varnode *othervn = op->getIn(1-slot);
// Check if othervn is an offset
if (!othervn->isConstant()) {
if ((!othervn->isWritten())||(othervn->getDef()->code() != CPUI_INT_MULT))
return -1;
if (othervn->isWritten()) {
PcodeOp *multop = othervn->getDef();
if (multop->code() == CPUI_INT_MULT) {
Varnode *constvn = multop->getIn(1);
if (constvn->isConstant()) {
uintb mult = constvn->getOffset();
if (mult == calc_mask(constvn->getSize())) // If multiplying by -1
return 1; // Assume this is a pointer difference and don't propagate
if (sz != 0 && (mult % sz) != 0)
return 1;
}
return 2;
}
}
if (sz == 1)
return 2;
return 1;
}
if (othervn->getTempType()->getMetatype() == TYPE_PTR) // Check if othervn marked as ptr
return -1;
if (othervn->isConstant())
return othervn->getOffset();
return 1;
off = othervn->getOffset();
return 0;
}
return -1;
return 1;
}
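
To make the three command codes concrete, here is a small standalone sketch that mirrors the PTRADD-side decision logic described in the comment above on plain integers. It is not the Ghidra API; the helper name and parameters are invented for illustration.

#include <cassert>
#include <cstdint>

// 0 = "add a constant" (byte offset passed back), 1 = block propagation,
// 2 = propagate the pointer data-type untransformed.
static int addPointerCommand(bool indexIsConstant, uint64_t index, uint64_t elemSize,
                             uint64_t pointedToSize, uint64_t &off)
{
  if (indexIsConstant) {                       // e.g. PTRADD(ptr,#2,#4) or ptr + #8
    off = index * elemSize;
    return 0;
  }
  if (pointedToSize != 0 && elemSize % pointedToSize != 0)
    return 1;                                  // stride is not a multiple of the pointed-to size
  return 2;                                    // variable index: keep the type, fold no offset
}

int main(void)
{
  uint64_t off = 0;
  assert(addPointerCommand(true, 2, 4, 4, off) == 0 && off == 8);  // constant index: offset 8
  assert(addPointerCommand(false, 0, 3, 4, off) == 1);             // stride 3 into 4-byte elements
  assert(addPointerCommand(false, 0, 4, 4, off) == 2);             // ptr + i*4: untransformed
  return 0;
}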

/// \brief Propagate a pointer data-type through an ADD operation.

@@ -4240,30 +4316,24 @@ int4 ActionInferTypes::propagateAddPointer(PcodeOp *op,int4 slot)
/// \param inslot is the edge to propagate along
/// \return the transformed Datatype or the original output Datatype
Datatype *ActionInferTypes::propagateAddIn2Out(TypeFactory *typegrp,PcodeOp *op,int4 inslot)

{
Datatype *rettype = op->getIn(inslot)->getTempType(); // We know this is a pointer type
Datatype *tstruct = ((TypePointer *)rettype)->getPtrTo();
int4 offset = propagateAddPointer(op,inslot);
if (offset==-1) return op->getOut()->getTempType(); // Doesn't look like a good pointer add
uintb uoffset = AddrSpace::addressToByte(offset,((TypePointer *)rettype)->getWordSize());
if (tstruct->getSize() > 0 && !tstruct->isVariableLength())
uoffset = uoffset % tstruct->getSize();
if (uoffset==0) {
if (op->code() == CPUI_PTRSUB) // Go down at least one level
rettype = typegrp->downChain(rettype,uoffset);
if (rettype == (Datatype *)0)
rettype = op->getOut()->getTempType();
}
else {
while(uoffset != 0) {
rettype = typegrp->downChain(rettype,uoffset);
if (rettype == (Datatype *)0) {
rettype = op->getOut()->getTempType(); // Don't propagate anything
break;
}
}
TypePointer *pointer = (TypePointer *)op->getIn(inslot)->getTempType(); // We know this is a pointer type
uintb uoffset;
int4 command = propagateAddPointer(uoffset,op,inslot,pointer->getPtrTo()->getSize());
if (command == 1) return op->getOut()->getTempType(); // Doesn't look like a good pointer add
if (command != 2) {
uoffset = AddrSpace::addressToByte(uoffset,pointer->getWordSize());
bool allowWrap = (op->code() != CPUI_PTRSUB);
do {
pointer = pointer->downChain(uoffset,allowWrap,*typegrp);
if (pointer == (TypePointer *)0)
return op->getOut()->getTempType();
} while(uoffset != 0);
}
Datatype *rettype = pointer;
if (rettype == (Datatype *)0)
rettype = op->getOut()->getTempType();
if (op->getIn(inslot)->isSpacebase()) {
if (rettype->getMetatype() == TYPE_PTR) {
TypePointer *ptype = (TypePointer *)rettype;

@@ -4367,7 +4437,7 @@ bool ActionInferTypes::propagateGoodEdge(PcodeOp *op,int4 inslot,int4 outslot,Va
/// \param outslot indicates the edge's output Varnode
/// \return \b true if the data-type propagates
bool ActionInferTypes::propagateTypeEdge(TypeFactory *typegrp,PcodeOp *op,int4 inslot,int4 outslot)

{
Varnode *invn,*outvn;
Datatype *newtype;

@@ -4608,7 +4678,7 @@ void ActionInferTypes::propagateRef(Funcdata &data,Varnode *vn,const Address &ad
} while(cur != (Datatype *)0);
}
if (lastct->getSize() != cursize) continue;

// Try to propagate the reference type into a varnode that is pointed to by that reference
if (0>lastct->typeOrder(*curvn->getTempType())) {
#ifdef TYPEPROP_DEBUG

@@ -4850,7 +4920,7 @@ void ActionDatabase::buildDefaultGroups(void)
"cleanup", "merge", "dynamic", "casts", "analysis",
"fixateglobals", "fixateproto",
"segment", "returnsplit", "nodejoin", "doubleload", "doubleprecis",
"unreachable", "subvar", "floatprecision",
"unreachable", "subvar", "floatprecision",
"conditionalexe", "" };
setGroup("decompile",members);

@@ -310,6 +310,7 @@ public:
/// input. In this case, it casts to the necessary pointer type
/// immediately.
class ActionSetCasts : public Action {
static bool testStructOffset0(Varnode *vn,Datatype *ct,CastStrategy *castStrategy);
static int4 castOutput(PcodeOp *op,Funcdata &data,CastStrategy *castStrategy);
static int4 castInput(PcodeOp *op,int4 slot,Funcdata &data,CastStrategy *castStrategy);
public:

@@ -924,7 +925,7 @@ class ActionInferTypes : public Action {
int4 localcount; ///< Number of passes performed for this function
static void buildLocaltypes(Funcdata &data); ///< Assign initial data-type based on local info
static bool writeBack(Funcdata &data); ///< Commit the final propagated data-types to Varnodes
static int4 propagateAddPointer(PcodeOp *op,int4 slot); ///< Test if edge is pointer plus a constant
static int4 propagateAddPointer(uintb &off,PcodeOp *op,int4 slot,int4 sz); ///< Test if edge is pointer plus a constant
static Datatype *propagateAddIn2Out(TypeFactory *typegrp,PcodeOp *op,int4 inslot);
static bool propagateGoodEdge(PcodeOp *op,int4 inslot,int4 outslot,Varnode *invn);
static bool propagateTypeEdge(TypeFactory *typegrp,PcodeOp *op,int4 inslot,int4 outslot);

@@ -361,7 +361,7 @@ void Funcdata::spacebaseConstant(PcodeOp *op,int4 slot,SymbolEntry *entry,const
bool typelock = sym->isTypeLocked();
if (typelock && (entrytype->getMetatype() == TYPE_UNKNOWN))
typelock = false;
outvn->updateType(ptrentrytype,typelock,true);
outvn->updateType(ptrentrytype,typelock,false);
if (extra != 0) {
if (extraOp == (PcodeOp *)0) {
extraOp = newOp(2,op->getAddr());

@@ -6141,6 +6141,33 @@ bool RulePtrArith::verifyAddTreeBottom(PcodeOp *op,int4 slot)
return true;
}

/// \brief Test for other pointers in the ADD tree above the given op that might be a preferred base
///
/// This tests the condition of RulePushPtr, making sure that the given op isn't the lone descendant
/// of a pointer constructed by INT_ADD on another pointer (which would then be preferred).
/// \param op is the given op
/// \param slot is the input slot of the pointer
/// \return \b true if the indicated slot holds the preferred pointer
bool RulePtrArith::verifyPreferredPointer(PcodeOp *op,int4 slot)

{
Varnode *vn = op->getIn(slot);
// Check if RulePushPtr would apply here
if (op->getIn(1-slot)->getType()->getMetatype() != TYPE_PTR && vn->isWritten()) {
PcodeOp *preOp = vn->getDef();
if (preOp->code() == CPUI_INT_ADD) {
if (vn->loneDescend() == op) {
int ptrCount = 0;
if (preOp->getIn(0)->getType()->getMetatype() == TYPE_PTR) ptrCount += 1;
if (preOp->getIn(1)->getType()->getMetatype() == TYPE_PTR) ptrCount += 1;
if (ptrCount == 1)
return false; // RulePushPtr would apply, so we are not preferred
}
}
}
return true;
}
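
A hypothetical p-code fragment (not taken from this commit) shows when the indicated slot is not the preferred pointer:

  ptr2 = INT_ADD(ptr1, #0x10)    ; ptr1 carries the pointer data-type, #0x10 does not
  res  = INT_ADD(ptr2, idx)      ; ptr2 has no descendant other than this op

Here verifyPreferredPointer returns false for the slot holding ptr2: RulePushPtr would first push the second INT_ADD up past the constant add, so ptr1, not ptr2, should serve as the base of the pointer tree.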

/// \class RulePtrArith
/// \brief Transform pointer arithmetic
///

@@ -6180,11 +6207,14 @@ int4 RulePtrArith::applyOp(PcodeOp *op,Funcdata &data)
}
if (slot == op->numInput()) return 0;
if (!verifyAddTreeBottom(op, slot)) return 0;
if (!verifyPreferredPointer(op, slot)) return 0;

const TypePointer *tp = (const TypePointer *) ct;
ct = tp->getPtrTo(); // Type being pointed to
int4 unitsize = AddrSpace::addressToByteInt(1,tp->getWordSize());
if (ct->getSize() == unitsize) { // Degenerate case
if (op->getOut()->getType()->getMetatype() != TYPE_PTR) // Make sure pointer propagates thru INT_ADD
return 0;
vector<Varnode *> newparams;
newparams.push_back( op->getIn(slot) );
newparams.push_back( op->getIn(1-slot) );

@@ -1028,6 +1028,7 @@ public:
};
class RulePtrArith : public Rule {
static bool verifyAddTreeBottom(PcodeOp *op,int4 slot);
static bool verifyPreferredPointer(PcodeOp *op,int4 slot);
public:
RulePtrArith(const string &g) : Rule(g, 0, "ptrarith") {} ///< Constructor
virtual Rule *clone(const ActionGroupList &grouplist) const {

@@ -556,6 +556,45 @@ void TypePointer::restoreXml(const Element *el,TypeFactory &typegrp)
flags = ptrto->getInheritable();
}

/// \brief Find a sub-type pointer given an offset into \b this
///
/// Add a constant offset to \b this pointer.
/// If there is a valid component at that offset, return a pointer
/// to the data-type of the component or NULL otherwise.
/// This routine only goes down one level at most. Pass back the
/// renormalized offset relative to the new data-type
/// \param off is a reference to the offset to add
/// \param allowArrayWrap is \b true if the pointer should be treated as a pointer to an array
/// \return a pointer datatype for the component or NULL
TypePointer *TypePointer::downChain(uintb &off,bool allowArrayWrap,TypeFactory &typegrp)

{
int4 ptrtoSize = ptrto->getSize();
if (off >= ptrtoSize) { // Check if we are wrapping
if (ptrtoSize != 0 && !ptrto->isVariableLength()) { // Check if pointed-to is wrappable
if (!allowArrayWrap)
return (TypePointer *)0;
intb signOff = (intb)off;
sign_extend(signOff,size*8-1);
signOff = signOff % ptrtoSize;
if (signOff < 0)
signOff = signOff + ptrtoSize;
off = signOff;
if (off == 0) // If we've wrapped and are now at zero
return this; // consider this going down one level
}
}

// If we know we have exactly one of an array, strip the array to get pointer to element
bool doStrip = (ptrto->getMetatype() != TYPE_ARRAY);
Datatype *pt = ptrto->getSubType(off,&off);
if (pt == (Datatype *)0)
return (TypePointer *)0;
if (doStrip)
return typegrp.getTypePointerStripArray(size, pt, wordsize);
return typegrp.getTypePointer(size,pt,wordsize);
}
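
A worked example of the renormalization (hypothetical data-type, not from this commit): for a pointer to struct Foo { int4 a; int4 b; } of size 8, downChain with off = 4 returns a pointer to int4 and passes back off = 0, i.e. it lands on field b. With off = 12 and allowArrayWrap set (the INT_ADD case), the offset first wraps modulo the structure size to 4 and the same int4 pointer comes back, which is how indexing into an array of Foo is modeled; with allowArrayWrap false (the PTRSUB case) the same call returns NULL.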

void TypeArray::printRaw(ostream &s) const

{

@@ -2159,31 +2198,6 @@ void TypeFactory::destroyType(Datatype *ct)
delete ct;
}

/// Add a constant offset to a pointer with known data-type.
/// If there is a valid component at that offset, return a pointer
/// to the data-type of the component or NULL otherwise.
/// This routine only goes down one level at most. Pass back the
/// renormalized offset relative to the new data-type
/// \param ptrtype is the pointer data-type being added to
/// \param off is a reference to the offset to add
/// \return a pointer datatype for the component or NULL
Datatype *TypeFactory::downChain(Datatype *ptrtype,uintb &off)

{ // Change ptr->struct => ptr->substruct
// where substruct starts at offset off
if (ptrtype->metatype != TYPE_PTR) return (Datatype *)0;
TypePointer *ptype = (TypePointer *)ptrtype;
Datatype *pt = ptype->ptrto;
// If we know we have exactly one of an array, strip the array to get pointer to element
bool doStrip = (pt->getMetatype() != TYPE_ARRAY);
pt = pt->getSubType(off,&off);
if (pt == (Datatype *)0)
return (Datatype *)0;
if (doStrip)
return getTypePointerStripArray(ptype->size, pt, ptype->getWordSize());
return getTypePointer(ptype->size,pt,ptype->getWordSize());
}

/// The data-type propagation system can push around data-types that are \e partial or are
/// otherwise unrepresentable in the source language. This method substitutes those data-types
/// with a concrete data-type that is representable, or returns the same data-type if it is already concrete.

@@ -248,6 +248,7 @@ public:
virtual int4 compareDependency(const Datatype &op) const; // For tree structure
virtual Datatype *clone(void) const { return new TypePointer(*this); }
virtual void saveXml(ostream &s) const;
virtual TypePointer *downChain(uintb &off,bool allowArrayWrap,TypeFactory &typegrp);
};

/// \brief Datatype object representing an array of elements

@@ -451,7 +452,6 @@ public:
const vector<Datatype *> &intypes,
bool dotdotdot); ///< Create a "function" datatype
void destroyType(Datatype *ct); ///< Remove a data-type from \b this
Datatype *downChain(Datatype *ptrtype,uintb &off); ///< Find a sub-type matching a pointer and offset
Datatype *concretize(Datatype *ct); ///< Convert given data-type to concrete form
void dependentOrder(vector<Datatype *> &deporder) const; ///< Place all data-types in dependency order
void saveXml(ostream &s) const; ///< Save \b this container to stream

@@ -1711,7 +1711,7 @@ Datatype *TypeOpPtrsub::getOutputToken(const PcodeOp *op,CastStrategy *castStrat
TypePointer *ptype = (TypePointer *)op->getIn(0)->getHigh()->getType();
if (ptype->getMetatype() == TYPE_PTR) {
uintb offset = AddrSpace::addressToByte(op->getIn(1)->getOffset(),ptype->getWordSize());
Datatype *rettype = tlst->downChain(ptype,offset);
Datatype *rettype = ptype->downChain(offset,false,*tlst);
if ((offset==0)&&(rettype != (Datatype *)0))
return rettype;
}

@@ -270,6 +270,7 @@ ScopeLocal::ScopeLocal(uint8 id,AddrSpace *spc,Funcdata *fd,Architecture *g) : S
{
space = spc;
deepestParamOffset = ~((uintb)0);
rangeLocked = false;
stackGrowsNegative = true;
restrictScope(fd);

@@ -310,12 +311,14 @@ void ScopeLocal::collectNameRecs(void)
void ScopeLocal::resetLocalWindow(void)

{
stackGrowsNegative = fd->getFuncProto().isStackGrowsNegative();
deepestParamOffset = stackGrowsNegative ? ~((uintb)0) : 0;

if (rangeLocked) return;

localRange = fd->getFuncProto().getLocalRange();
const RangeList &localRange( fd->getFuncProto().getLocalRange() );
const RangeList &paramrange( fd->getFuncProto().getParamRange() );

stackGrowsNegative = fd->getFuncProto().isStackGrowsNegative();
RangeList newrange;

set<Range>::const_iterator iter;

@@ -375,16 +378,13 @@ void ScopeLocal::markNotMapped(AddrSpace *spc,uintb first,int4 sz,bool parameter
last = spc->getHighest();
if (parameter) { // Everything above parameter
if (stackGrowsNegative) {
const Range *rng = localRange.getRange(spc,first);
if (rng != (const Range *)0)
first = rng->getFirst(); // Everything less is not mapped
if (first < deepestParamOffset)
deepestParamOffset = first;
}
else {
const Range *rng = localRange.getRange(spc,last);
if (rng != (const Range *)0)
last = rng->getLast(); // Everything greater is not mapped
if (first > deepestParamOffset)
deepestParamOffset = first;
}
sz = (last-first)+1;
}
Address addr(space,first);
// Remove any symbols under range

@@ -427,6 +427,11 @@ string ScopeLocal::buildVariableName(const Address &addr,
s << 'X'; // Indicate local stack space allocated by caller
start = -start;
}
else {
if (deepestParamOffset + 1 > 1 && stackGrowsNegative == (addr.getOffset() < deepestParamOffset)) {
s << 'Y'; // Indicate unusual region of stack
}
}
s << dec << start;
return makeNameUnique(s.str());
}

@@ -1095,6 +1100,9 @@ void ScopeLocal::markUnaliased(const vector<uintb> &alias)
EntryMap *rangemap = maptable[space->getIndex()];
if (rangemap == (EntryMap *)0) return;
list<SymbolEntry>::iterator iter,enditer;
set<Range>::const_iterator rangeIter, rangeEndIter;
rangeIter = getRangeTree().begin();
rangeEndIter = getRangeTree().end();

int4 alias_block_level = glb->alias_block_level;
bool aliason = false;

@@ -1105,31 +1113,39 @@ void ScopeLocal::markUnaliased(const vector<uintb> &alias)
enditer = rangemap->end_list();

while(iter!=enditer) {
if ((i<alias.size()) && (alias[i] <= (*iter).getAddr().getOffset() + (*iter).getSize() - 1)) {
SymbolEntry &entry(*iter++);
uintb curoff = entry.getAddr().getOffset() + entry.getSize() - 1;
while ((i<alias.size()) && (alias[i] <= curoff)) {
aliason = true;
curalias = alias[i++];
}
else {
SymbolEntry &entry(*iter++);
Symbol *symbol = entry.getSymbol();
// Test if there is enough distance between symbol
// and last alias to warrant ignoring the alias
// NOTE: this is primarily to reset aliasing between
// stack parameters and stack locals
if (aliason && (entry.getAddr().getOffset()+entry.getSize() -1 - curalias > 0xffff))
aliason = false;
if (!aliason)
symbol->getScope()->setAttribute(symbol,Varnode::nolocalalias);
if (symbol->isTypeLocked() && alias_block_level != 0) {
if (alias_block_level == 3)
aliason = false; // For this level, all locked data-types block aliases
else {
type_metatype meta = symbol->getType()->getMetatype();
if (meta == TYPE_STRUCT)
aliason = false; // Only structures block aliases
else if (meta == TYPE_ARRAY && alias_block_level > 1)
aliason = false; // Only arrays (and structures) block aliases
}
// Aliases shouldn't go thru unmapped regions of the local variables
while(rangeIter != rangeEndIter) {
const Range &rng(*rangeIter);
if (rng.getSpace() == space) {
if (rng.getFirst() > curalias && curoff >= rng.getFirst())
aliason = false;
if (rng.getLast() >= curoff) break; // Check if symbol past end of mapped range
if (rng.getLast() > curalias) // If past end of range AND past last alias offset
aliason = false; // turn aliases off
}
++rangeIter;
}
Symbol *symbol = entry.getSymbol();
// Test if there is enough distance between symbol
// and last alias to warrant ignoring the alias
// NOTE: this is primarily to reset aliasing between
// stack parameters and stack locals
if (aliason && (curoff - curalias > 0xffff)) aliason = false;
if (!aliason) symbol->getScope()->setAttribute(symbol,Varnode::nolocalalias);
if (symbol->isTypeLocked() && alias_block_level != 0) {
if (alias_block_level == 3)
aliason = false; // For this level, all locked data-types block aliases
else {
type_metatype meta = symbol->getType()->getMetatype();
if (meta == TYPE_STRUCT)
aliason = false; // Only structures block aliases
else if (meta == TYPE_ARRAY && alias_block_level > 1) aliason = false; // Only arrays (and structures) block aliases
}
}
}
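
Informally, the added range walk means an alias candidate can no longer reach a symbol across an unmapped gap in the local scope. For example (made-up offsets): an alias recorded at stack offset 0x10 no longer marks a symbol occupying 0x40-0x47 when the mapped local region containing that symbol only begins at 0x20, because the bytes in between are not part of the scope's mapped ranges.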

@@ -195,10 +195,10 @@ public:
/// portions are used for temporary storage (not mapped), and what portion is for parameters.
class ScopeLocal : public ScopeInternal {
AddrSpace *space; ///< Address space containing the local stack
RangeList localRange; ///< The set of addresses that might hold mapped locals (not parameters)
list<NameRecommend> nameRecommend; ///< Symbol name recommendations for specific addresses
list<DynamicRecommend> dynRecommend; ///< Symbol name recommendations for dynamic locations
list<TypeRecommend> typeRecommend; ///< Data-types for specific storage locations
uintb deepestParamOffset; ///< Deepest position of a parameter passed (to a called function) on the stack
bool stackGrowsNegative; ///< Marked \b true if the stack is considered to \e grow towards smaller offsets
bool rangeLocked; ///< True if the subset of addresses \e mapped to \b this scope has been locked
bool adjustFit(RangeHint &a) const; ///< Make the given RangeHint fit in the current Symbol map