diff --git a/src/base/SMPFunction.cpp b/src/base/SMPFunction.cpp
index 752c3b361b2562070036b3119cffec9aeadfa157..47dffb137730aed95816c0fb3703e3a2029e5c18 100644
--- a/src/base/SMPFunction.cpp
+++ b/src/base/SMPFunction.cpp
@@ -2543,6 +2543,9 @@ void SMPFunction::MDFindIncomingTypes(void) {
 
 // Determine boundaries in the stack frame.
 void SMPFunction::BuildLocalVarTable(void) {
+#if 0
+	this->BuildStackAccessTables();
+#endif
 	// Currently we just use the info that IDA Pro has inferred from the direct
 	// addressing of stack locations.
 	this->SemiNaiveLocalVarID();
@@ -2551,8 +2554,6 @@ void SMPFunction::BuildLocalVarTable(void) {
 
 // Build tables to characterize stack accesses.
 void SMPFunction::BuildStackAccessTables(void) {
-	list<SMPInstr *>::iterator InstIter;
-
 	this->SetLocalVarOffsetLimit(-20000);
 	STARS_Function_t *FuncPtr = SMP_get_func(this->GetStartAddr());
 	if (NULL == FuncPtr) {
@@ -2569,6 +2570,7 @@ void SMPFunction::BuildStackAccessTables(void) {
 	}
 
 	// Calculate min and max stack operand offsets accessed.
+	list<SMPInstr *>::iterator InstIter;
 	InstIter = this->Instrs.begin();
 #if SMP_USE_SSA_FNOP_MARKER
 	if ((*InstIter)->IsMarkerInst())
@@ -2654,6 +2656,7 @@ void SMPFunction::BuildStackAccessTables(void) {
 	// the other vector will be for nonnegative offsets (e.g. return address, inargs).
 	// Two more vectors will hold fine-grained stack access info.
 	// These will be our memory maps for analyzing stack usage.
+	STARS_sval_t AccessTableEntriesCount = this->MaxStackAccessLimit - this->MinStackAccessOffset;
 	for (int i = this->MinStackAccessOffset; i < this->MaxStackAccessLimit; ++i) {
 		struct StackFrameEntry TempEntry;
 		TempEntry.VarPtr = NULL;
@@ -2721,7 +2724,7 @@ void SMPFunction::BuildStackAccessTables(void) {
 			if (this->MDGetStackOffsetAndSize(CurrInst, TempOp, this->MinStackAccessOffset, offset, DataSize,
 				UsedFramePointer, IndexedAccess, SignedMove, UnsignedMove)) {
 				SignedOffset = (int) offset;
-				if (IndexedAccess && ((0 > SignedOffset) || ((SignedOffset + DataSize) > this->StackFrameMap.size()))) {
+				if (IndexedAccess && ((0 > SignedOffset) || ((SignedOffset + DataSize) > AccessTableEntriesCount))) {
 					continue; // Indexed expressions can be within frame even when offset is outside frame
 				}
 				assert(0 <= SignedOffset);
@@ -2736,20 +2739,20 @@ void SMPFunction::BuildStackAccessTables(void) {
 #endif
 				bool ESPRelative = (!(UsedFramePointer || CurrInst->HasFPNormalizedToSP()));
 				if (SignedOffset < 0) {
-					for (int j = 0; j < (int) DataSize; ++j) {
-						this->NegativeOffsetStackFrameMap[-SignedOffset + j].Written = true;
-						this->NegativeOffsetStackFrameMap[-SignedOffset + j].IndexedAccess = IndexedAccess;
+					for (int j = 0; j < (int) DataSize; ++j) { // offset has zero-based index into negative offset vectors
+						this->NegativeOffsetStackFrameMap.at(offset + j).Written = true;
+						this->NegativeOffsetStackFrameMap[offset + j].IndexedAccess = IndexedAccess;
 						if (ESPRelative) {
-							this->NegativeOffsetStackFrameMap[-SignedOffset + j].ESPRelativeAccess = true;
+							this->NegativeOffsetStackFrameMap[offset + j].ESPRelativeAccess = true;
 						}
 						else {
-							this->NegativeOffsetStackFrameMap[-SignedOffset + j].EBPRelativeAccess = true;
+							this->NegativeOffsetStackFrameMap[offset + j].EBPRelativeAccess = true;
 						}
 					}
 				}
 				else {
 					for (int j = 0; j < (int) DataSize; ++j) {
-						this->PositiveOffsetStackFrameMap[SignedOffset + j].Written = true;
+						this->PositiveOffsetStackFrameMap.at(SignedOffset + j).Written = true;
 						this->PositiveOffsetStackFrameMap[SignedOffset + j].IndexedAccess = IndexedAccess;
 						if (ESPRelative) {
 							this->PositiveOffsetStackFrameMap[SignedOffset + j].ESPRelativeAccess = true;
@@ -2763,19 +2766,19 @@ void SMPFunction::BuildStackAccessTables(void) {
 				BitWidthMask = ComputeOperandBitWidthMask(TempOp, DataSize);
 				StackDefFG.SizeInfo = BitWidthMask;
 				StackDefFG.SignMiscInfo = FG_MASK_WRITTEN;
-				if (SignedOffset < 0) {
-					this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SizeInfo |= BitWidthMask;
-					this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_WRITTEN;
+				if (SignedOffset < 0) { // offset has zero-based index into negative offset vectors
+					this->NegativeOffsetFineGrainedStackTable.at(offset).SizeInfo |= BitWidthMask;
+					this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_WRITTEN;
 					if (IndexedAccess) {
-						this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_INDEXED_ACCESS;
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_INDEXED_ACCESS;
 						StackDefFG.SignMiscInfo |= FG_MASK_INDEXED_ACCESS;
 					}
 					if (ESPRelative) {
-						this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_SP_RELATIVE;
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_SP_RELATIVE;
 						StackDefFG.SignMiscInfo |= FG_MASK_SP_RELATIVE;
 					}
 					else {
-						this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_FP_RELATIVE;
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_FP_RELATIVE;
 						StackDefFG.SignMiscInfo |= FG_MASK_FP_RELATIVE;
 					}
 				}
@@ -2822,7 +2825,7 @@ void SMPFunction::BuildStackAccessTables(void) {
 			if (this->MDGetStackOffsetAndSize(CurrInst, TempOp, this->MinStackAccessOffset, offset, DataSize,
 				UsedFramePointer, IndexedAccess, SignedMove, UnsignedMove)) {
 				SignedOffset = (int) offset;
-				if (IndexedAccess && ((0 > SignedOffset) || ((SignedOffset + DataSize) > this->StackFrameMap.size()))) {
+				if (IndexedAccess && ((0 > SignedOffset) || ((SignedOffset + DataSize) > AccessTableEntriesCount))) {
 					continue; // Indexed expressions can be within frame but offset is outside frame
 				}
 				assert(0 <= SignedOffset);
@@ -2836,21 +2839,21 @@ void SMPFunction::BuildStackAccessTables(void) {
 				assert((SignedOffset + DataSize) <= this->StackFrameMap.size());
 #endif
 				bool ESPRelative = (!(UsedFramePointer || CurrInst->HasFPNormalizedToSP()));
-				if (SignedOffset < 0) {
+				if (SignedOffset < 0) { // offset has zero-based index into negative offset vectors
 					for (int j = 0; j < (int) DataSize; ++j) {
-						this->NegativeOffsetStackFrameMap[-SignedOffset + j].Read = true;
-						this->NegativeOffsetStackFrameMap[-SignedOffset + j].IndexedAccess = IndexedAccess;
+						this->NegativeOffsetStackFrameMap.at(offset + j).Read = true;
+						this->NegativeOffsetStackFrameMap[offset + j].IndexedAccess = IndexedAccess;
 						if (ESPRelative) {
-							this->NegativeOffsetStackFrameMap[-SignedOffset + j].ESPRelativeAccess = true;
+							this->NegativeOffsetStackFrameMap[offset + j].ESPRelativeAccess = true;
 						}
 						else {
-							this->NegativeOffsetStackFrameMap[-SignedOffset + j].EBPRelativeAccess = true;
+							this->NegativeOffsetStackFrameMap[offset + j].EBPRelativeAccess = true;
 						}
 					}
 				}
 				else {
-					for (int j = 0; j < (int)DataSize; ++j) {
-						this->PositiveOffsetStackFrameMap[SignedOffset + j].Read = true;
+					for (int j = 0; j < (int) DataSize; ++j) {
+						this->PositiveOffsetStackFrameMap.at(SignedOffset + j).Read = true;
 						this->PositiveOffsetStackFrameMap[SignedOffset + j].IndexedAccess = IndexedAccess;
 						if (ESPRelative) {
 							this->PositiveOffsetStackFrameMap[SignedOffset + j].ESPRelativeAccess = true;
@@ -2864,21 +2867,29 @@ void SMPFunction::BuildStackAccessTables(void) {
 				BitWidthMask = ComputeOperandBitWidthMask(TempOp, DataSize);
 				StackUseFG.SizeInfo = BitWidthMask;
 				StackUseFG.SignMiscInfo = FG_MASK_READ;
-				if (SignedOffset < 0) {
-					this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SizeInfo |= BitWidthMask;
-					this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_READ;
+				if (SignedOffset < 0) { // offset has zero-based index into negative offset vectors
+					this->NegativeOffsetFineGrainedStackTable.at(offset).SizeInfo |= BitWidthMask;
+					this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_READ;
 					if (IndexedAccess) {
-						this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_INDEXED_ACCESS;
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_INDEXED_ACCESS;
 						StackUseFG.SignMiscInfo |= FG_MASK_INDEXED_ACCESS;
 					}
 					if (ESPRelative) {
-						this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_SP_RELATIVE;
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_SP_RELATIVE;
 						StackUseFG.SignMiscInfo |= FG_MASK_SP_RELATIVE;
 					}
 					else {
-						this->NegativeOffsetFineGrainedStackTable.at(-SignedOffset).SignMiscInfo |= FG_MASK_FP_RELATIVE;
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_FP_RELATIVE;
 						StackUseFG.SignMiscInfo |= FG_MASK_FP_RELATIVE;
 					}
+					if (SignedMove) {
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_SIGNED;
+						StackUseFG.SignMiscInfo |= FG_MASK_SIGNED;
+					}
+					else if (UnsignedMove) {
+						this->NegativeOffsetFineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_UNSIGNED;
+						StackUseFG.SignMiscInfo |= FG_MASK_UNSIGNED;
+					}
 				}
 				else { // SignedOffset >= 0
 					this->PositiveOffsetFineGrainedStackTable.at(SignedOffset).SizeInfo |= BitWidthMask;
@@ -2895,14 +2906,14 @@ void SMPFunction::BuildStackAccessTables(void) {
 						this->PositiveOffsetFineGrainedStackTable.at(SignedOffset).SignMiscInfo |= FG_MASK_FP_RELATIVE;
 						StackUseFG.SignMiscInfo |= FG_MASK_FP_RELATIVE;
 					}
-				}
-				if (SignedMove) {
-					this->FineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_SIGNED;
-					StackUseFG.SignMiscInfo |= FG_MASK_SIGNED;
-				}
-				else if (UnsignedMove) {
-					this->FineGrainedStackTable.at(offset).SignMiscInfo |= FG_MASK_UNSIGNED;
-					StackUseFG.SignMiscInfo |= FG_MASK_UNSIGNED;
+					if (SignedMove) {
+						this->PositiveOffsetFineGrainedStackTable.at(SignedOffset).SignMiscInfo |= FG_MASK_SIGNED;
+						StackUseFG.SignMiscInfo |= FG_MASK_SIGNED;
+					}
+					else if (UnsignedMove) {
+						this->PositiveOffsetFineGrainedStackTable.at(SignedOffset).SignMiscInfo |= FG_MASK_UNSIGNED;
+						StackUseFG.SignMiscInfo |= FG_MASK_UNSIGNED;
+					}
 				}
 				// Insert the StackUseFG into the map of InstAddr to USE FG info.
 				pair<map<STARS_ea_t, struct FineGrainedInfo>::iterator, bool> InsertResult;
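
The hunks above split the old combined StackFrameMap / FineGrainedStackTable into separate negative-offset and nonnegative-offset vectors, and bound indexed accesses by AccessTableEntriesCount (the size of the whole accessed range) rather than by the old map's size. Below is a minimal stand-alone sketch of the indexing convention these hunks appear to rely on; it is not the project's code, and the names ByteEntry, NegEntries, PosEntries, and RecordWrite, as well as the assumption that negative frame offsets are biased by MinStackAccessOffset to form a zero-based index, are illustrative only.

// Sketch only: split stack-access tables with a biased index for negative offsets.
#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical per-byte record, loosely modeled on StackFrameEntry.
struct ByteEntry {
    bool Read = false;
    bool Written = false;
};

int main() {
    // Assumed accessed range: [MinStackAccessOffset, MaxStackAccessLimit).
    const int MinStackAccessOffset = -16; // e.g. locals below the frame base
    const int MaxStackAccessLimit = 12;   // e.g. return address and incoming args

    // Mirrors AccessTableEntriesCount in the diff: the count spans the whole range.
    const int AccessTableEntriesCount = MaxStackAccessLimit - MinStackAccessOffset;

    // Two per-byte maps: one for negative offsets, one for nonnegative offsets.
    std::vector<ByteEntry> NegEntries(static_cast<std::size_t>(-MinStackAccessOffset));
    std::vector<ByteEntry> PosEntries(static_cast<std::size_t>(MaxStackAccessLimit));
    assert(AccessTableEntriesCount == static_cast<int>(NegEntries.size() + PosEntries.size()));

    // Record a write of DataSize bytes at a signed frame offset, skipping accesses
    // that fall outside the tracked range (analogous to the indexed-access check).
    auto RecordWrite = [&](int SignedOffset, int DataSize) {
        if ((SignedOffset < MinStackAccessOffset) ||
            ((SignedOffset + DataSize) > MaxStackAccessLimit)) {
            return; // outside the tracked frame range
        }
        for (int j = 0; j < DataSize; ++j) {
            const int Curr = SignedOffset + j;
            if (Curr < 0) // bias negative offsets to a zero-based index
                NegEntries.at(static_cast<std::size_t>(Curr - MinStackAccessOffset)).Written = true;
            else          // nonnegative offsets index their vector directly
                PosEntries.at(static_cast<std::size_t>(Curr)).Written = true;
        }
    };

    RecordWrite(-8, 4); // a local variable
    RecordWrite(4, 4);  // an incoming argument slot
    assert(NegEntries.at(static_cast<std::size_t>(-8 - MinStackAccessOffset)).Written);
    assert(PosEntries.at(4).Written);
    return 0;
}

Compiling this sketch with any C++11 compiler and running it exercises both vectors and the range check without depending on any SMP/STARS types.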