mirror of
https://github.com/NationalSecurityAgency/ghidra.git
synced 2025-10-04 02:09:44 +02:00
Replace uses of LongObjectHashTable with Java's HashMap
This commit is contained in:
parent
7af55169c0
commit
e3aebe3adb
22 changed files with 407 additions and 767 deletions
|
@ -33,7 +33,6 @@ import ghidra.program.model.data.*;
|
||||||
import ghidra.program.model.data.Enum;
|
import ghidra.program.model.data.Enum;
|
||||||
import ghidra.program.model.listing.DataTypeChangeSet;
|
import ghidra.program.model.listing.DataTypeChangeSet;
|
||||||
import ghidra.util.*;
|
import ghidra.util.*;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.*;
|
import ghidra.util.exception.*;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
|
@ -78,13 +77,13 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
private DataTypeMergePanel dtMergePanel;
|
private DataTypeMergePanel dtMergePanel;
|
||||||
private int totalConflictCount;
|
private int totalConflictCount;
|
||||||
private int currentConflictIndex;
|
private int currentConflictIndex;
|
||||||
private LongObjectHashtable<DataType> myResolvedDts; // maps My data type key -> resolved Data type
|
private Map<Long, DataType> myResolvedDts; // maps My data type key -> resolved Data type
|
||||||
private LongObjectHashtable<DataType> latestResolvedDts; // maps Latest data type key -> resolved Data type
|
private Map<Long, DataType> latestResolvedDts; // maps Latest data type key -> resolved Data type
|
||||||
private LongObjectHashtable<DataType> origResolvedDts; // maps Original data type key -> resolved Data type
|
private Map<Long, DataType> origResolvedDts; // maps Original data type key -> resolved Data type
|
||||||
private List<FixUpInfo> fixUpList; // FixUpInfo objects that must be resolved after
|
private List<FixUpInfo> fixUpList; // FixUpInfo objects that must be resolved after
|
||||||
private HashSet<Long> fixUpIDSet; // track types with fixups
|
private HashSet<Long> fixUpIDSet; // track types with fixups
|
||||||
// data types have been added and conflicts resolved.
|
// data types have been added and conflicts resolved.
|
||||||
private LongObjectHashtable<CleanUpInfo> cleanupPlaceHolderList; // placeholders that need to be removed.
|
private Map<Long, CleanUpInfo> cleanupPlaceHolderList; // placeholders that need to be removed.
|
||||||
private int progressIndex; // index for showing progress
|
private int progressIndex; // index for showing progress
|
||||||
|
|
||||||
private int categoryChoice = ASK_USER;
|
private int categoryChoice = ASK_USER;
|
||||||
|
@ -700,7 +699,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* the data type existed
|
* the data type existed
|
||||||
*/
|
*/
|
||||||
private DataType updateDataTypeName(long id, DataType dt,
|
private DataType updateDataTypeName(long id, DataType dt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType resultDt = dtms[RESULT].getDataType(id);
|
DataType resultDt = dtms[RESULT].getDataType(id);
|
||||||
DataType newDt = null;
|
DataType newDt = null;
|
||||||
if (resultDt != null) {
|
if (resultDt != null) {
|
||||||
|
@ -771,7 +770,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType updateDataType(long id, DataTypeManager dtm,
|
private DataType updateDataType(long id, DataTypeManager dtm,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes, boolean updatePath) {
|
Map<Long, DataType> resolvedDataTypes, boolean updatePath) {
|
||||||
DataType resultDt = dtms[RESULT].getDataType(id);
|
DataType resultDt = dtms[RESULT].getDataType(id);
|
||||||
DataType myDt = dtm.getDataType(id);
|
DataType myDt = dtm.getDataType(id);
|
||||||
|
|
||||||
|
@ -816,7 +815,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType updateDataTypeSource(long id, DataTypeManager dtm,
|
private DataType updateDataTypeSource(long id, DataTypeManager dtm,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType resultDt = dtms[RESULT].getDataType(id);
|
DataType resultDt = dtms[RESULT].getDataType(id);
|
||||||
DataType myDt = dtm.getDataType(id);
|
DataType myDt = dtm.getDataType(id);
|
||||||
SourceArchive mySourceArchive = myDt.getSourceArchive();
|
SourceArchive mySourceArchive = myDt.getSourceArchive();
|
||||||
|
@ -832,7 +831,8 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
else {
|
else {
|
||||||
|
|
||||||
SourceArchive resultSourceArchive = resultDt.getSourceArchive();
|
SourceArchive resultSourceArchive = resultDt.getSourceArchive();
|
||||||
if (!resultSourceArchive.getSourceArchiveID().equals(
|
if (!resultSourceArchive.getSourceArchiveID()
|
||||||
|
.equals(
|
||||||
mySourceArchive.getSourceArchiveID())) {
|
mySourceArchive.getSourceArchiveID())) {
|
||||||
resultDt.setSourceArchive(mySourceArchive);
|
resultDt.setSourceArchive(mySourceArchive);
|
||||||
}
|
}
|
||||||
|
@ -850,7 +850,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* @return the resulting data type in this data type manager.
|
* @return the resulting data type in this data type manager.
|
||||||
*/
|
*/
|
||||||
private DataType addDataType(long dataTypeID, DataType dataType,
|
private DataType addDataType(long dataTypeID, DataType dataType,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
DataType existingDt = resolvedDataTypes.get(dataTypeID);
|
DataType existingDt = resolvedDataTypes.get(dataTypeID);
|
||||||
if (existingDt != null) {
|
if (existingDt != null) {
|
||||||
|
@ -899,7 +899,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* @return resolved data type that corresponds to id
|
* @return resolved data type that corresponds to id
|
||||||
*/
|
*/
|
||||||
private DataType getResolvedBaseType(long id, DataType dt,
|
private DataType getResolvedBaseType(long id, DataType dt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataTypeManager dtm = dt.getDataTypeManager();
|
DataTypeManager dtm = dt.getDataTypeManager();
|
||||||
DataType baseDt = getBaseDataType(dt);
|
DataType baseDt = getBaseDataType(dt);
|
||||||
if (baseDt == DataType.DEFAULT) {
|
if (baseDt == DataType.DEFAULT) {
|
||||||
|
@ -933,7 +933,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType createPointer(long id, Pointer pointerDt,
|
private DataType createPointer(long id, Pointer pointerDt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType innerDt = pointerDt.getDataType();
|
DataType innerDt = pointerDt.getDataType();
|
||||||
if (innerDt == DataType.DEFAULT) {
|
if (innerDt == DataType.DEFAULT) {
|
||||||
return pointerDt;
|
return pointerDt;
|
||||||
|
@ -954,7 +954,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType createTypeDef(long id, TypeDef originalTypeDef,
|
private DataType createTypeDef(long id, TypeDef originalTypeDef,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType innerDataType = originalTypeDef.getDataType();
|
DataType innerDataType = originalTypeDef.getDataType();
|
||||||
if (innerDataType == DataType.DEFAULT) {
|
if (innerDataType == DataType.DEFAULT) {
|
||||||
return originalTypeDef;
|
return originalTypeDef;
|
||||||
|
@ -985,7 +985,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType createArray(long id, Array array,
|
private DataType createArray(long id, Array array,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType dt = array.getDataType();
|
DataType dt = array.getDataType();
|
||||||
if (dt == DataType.DEFAULT) {
|
if (dt == DataType.DEFAULT) {
|
||||||
return array;
|
return array;
|
||||||
|
@ -1006,7 +1006,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType addComposite(long id, Composite myDt,
|
private DataType addComposite(long id, Composite myDt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
long oldLastChangeTime = myDt.getLastChangeTime();
|
long oldLastChangeTime = myDt.getLastChangeTime();
|
||||||
long oldLastChangeTimeInSourceArchive = myDt.getLastChangeTimeInSourceArchive();
|
long oldLastChangeTimeInSourceArchive = myDt.getLastChangeTimeInSourceArchive();
|
||||||
|
@ -1026,7 +1026,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType addFunctionDef(long id, FunctionDefinition myDt,
|
private DataType addFunctionDef(long id, FunctionDefinition myDt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
FunctionDefinition newDt = (FunctionDefinition) myDt.clone(dtms[RESULT]);
|
FunctionDefinition newDt = (FunctionDefinition) myDt.clone(dtms[RESULT]);
|
||||||
setCategoryPath(newDt, myDt.getCategoryPath());
|
setCategoryPath(newDt, myDt.getCategoryPath());
|
||||||
updateFunctionDef(id, myDt, newDt, resolvedDataTypes);
|
updateFunctionDef(id, myDt, newDt, resolvedDataTypes);
|
||||||
|
@ -1034,7 +1034,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void updateHashTables(long id, DataType newDt,
|
private void updateHashTables(long id, DataType newDt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
resolvedDataTypes.put(id, newDt);
|
resolvedDataTypes.put(id, newDt);
|
||||||
if (!myDtAddedList.contains(Long.valueOf(id))) {
|
if (!myDtAddedList.contains(Long.valueOf(id))) {
|
||||||
if (resolvedDataTypes == myResolvedDts) {
|
if (resolvedDataTypes == myResolvedDts) {
|
||||||
|
@ -1053,7 +1053,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataType getResolvedComponent(long compID,
|
private DataType getResolvedComponent(long compID,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType resolvedDt = resolvedDataTypes.get(compID);
|
DataType resolvedDt = resolvedDataTypes.get(compID);
|
||||||
if (resolvedDt != null) {
|
if (resolvedDt != null) {
|
||||||
// if this is a pointer, typedef, or array, check the
|
// if this is a pointer, typedef, or array, check the
|
||||||
|
@ -1095,7 +1095,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void updateFlexArray(long sourceDtID, Structure sourceDt, Structure destStruct,
|
private void updateFlexArray(long sourceDtID, Structure sourceDt, Structure destStruct,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
DataTypeComponent flexDtc = sourceDt.getFlexibleArrayComponent();
|
DataTypeComponent flexDtc = sourceDt.getFlexibleArrayComponent();
|
||||||
if (flexDtc == null) {
|
if (flexDtc == null) {
|
||||||
|
@ -1155,7 +1155,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void updateStructure(long sourceDtID, Structure sourceDt, Structure destStruct,
|
private void updateStructure(long sourceDtID, Structure sourceDt, Structure destStruct,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
// NOTE: it is possible for the same destStruct to be updated more than once;
|
// NOTE: it is possible for the same destStruct to be updated more than once;
|
||||||
// therefor we must cleanup any previous obsolete fixups
|
// therefor we must cleanup any previous obsolete fixups
|
||||||
|
@ -1356,7 +1356,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void updateUnion(long sourceDtID, Union sourceDt, Union destUnion,
|
private void updateUnion(long sourceDtID, Union sourceDt, Union destUnion,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
// NOTE: it is possible for the same destUnion to be updated more than once;
|
// NOTE: it is possible for the same destUnion to be updated more than once;
|
||||||
// therefor we must cleanup any previous obsolete fixups
|
// therefor we must cleanup any previous obsolete fixups
|
||||||
|
@ -1479,7 +1479,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void updateComposite(long sourceDtID, Composite sourceDt, Composite destDt,
|
private void updateComposite(long sourceDtID, Composite sourceDt, Composite destDt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
if (sourceDt instanceof Structure) {
|
if (sourceDt instanceof Structure) {
|
||||||
updateStructure(sourceDtID, (Structure) sourceDt, (Structure) destDt,
|
updateStructure(sourceDtID, (Structure) sourceDt, (Structure) destDt,
|
||||||
|
@ -1493,7 +1493,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
|
|
||||||
private void updateFunctionDef(long sourceFunctionDefDtID,
|
private void updateFunctionDef(long sourceFunctionDefDtID,
|
||||||
FunctionDefinition sourceFunctionDefDt, FunctionDefinition destDt,
|
FunctionDefinition sourceFunctionDefDt, FunctionDefinition destDt,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
|
|
||||||
// NOTE: it is possible for the same function def to be updated more than once;
|
// NOTE: it is possible for the same function def to be updated more than once;
|
||||||
// therefor we must cleanup any previous obsolete fixups
|
// therefor we must cleanup any previous obsolete fixups
|
||||||
|
@ -1542,7 +1542,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* has not been resolved yet
|
* has not been resolved yet
|
||||||
*/
|
*/
|
||||||
private DataType getResolvedParam(long id, long paramDatatypeID, int index,
|
private DataType getResolvedParam(long id, long paramDatatypeID, int index,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType resolvedDt = getResolvedComponent(paramDatatypeID, resolvedDataTypes);
|
DataType resolvedDt = getResolvedComponent(paramDatatypeID, resolvedDataTypes);
|
||||||
if (resolvedDt == null) {
|
if (resolvedDt == null) {
|
||||||
if (!myDtAddedList.contains(Long.valueOf(paramDatatypeID))) {
|
if (!myDtAddedList.contains(Long.valueOf(paramDatatypeID))) {
|
||||||
|
@ -2216,7 +2216,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* data type manager.
|
* data type manager.
|
||||||
*/
|
*/
|
||||||
private boolean equivalentDataTypeFound(long myDtID, DataType myDt) {
|
private boolean equivalentDataTypeFound(long myDtID, DataType myDt) {
|
||||||
if (myResolvedDts.contains(myDtID)) {
|
if (myResolvedDts.containsKey(myDtID)) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
DataType resultDt = dtms[RESULT].getDataType(myDt.getCategoryPath(), myDt.getName());
|
DataType resultDt = dtms[RESULT].getDataType(myDt.getCategoryPath(), myDt.getName());
|
||||||
|
@ -2226,7 +2226,8 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
UniversalID resultDtUniversalID = resultDt.getUniversalID();
|
UniversalID resultDtUniversalID = resultDt.getUniversalID();
|
||||||
UniversalID myDtUniversalID = myDt.getUniversalID();
|
UniversalID myDtUniversalID = myDt.getUniversalID();
|
||||||
// UniversalID can be null if data type is BuiltIn.
|
// UniversalID can be null if data type is BuiltIn.
|
||||||
if (!resultSourceArchive.getSourceArchiveID().equals(
|
if (!resultSourceArchive.getSourceArchiveID()
|
||||||
|
.equals(
|
||||||
mySourceArchive.getSourceArchiveID()) ||
|
mySourceArchive.getSourceArchiveID()) ||
|
||||||
!SystemUtilities.isEqual(resultDtUniversalID, myDtUniversalID)) {
|
!SystemUtilities.isEqual(resultDtUniversalID, myDtUniversalID)) {
|
||||||
return false;
|
return false;
|
||||||
|
@ -2241,7 +2242,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
|
|
||||||
private void cleanUpDataTypes() {
|
private void cleanUpDataTypes() {
|
||||||
// clean up data types
|
// clean up data types
|
||||||
long[] keys = cleanupPlaceHolderList.getKeys();
|
List<Long> keys = new ArrayList<Long>(cleanupPlaceHolderList.keySet());
|
||||||
for (long key : keys) {
|
for (long key : keys) {
|
||||||
CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(key);
|
CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(key);
|
||||||
cleanUpInfo.cleanUp();
|
cleanUpInfo.cleanUp();
|
||||||
|
@ -2566,7 +2567,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
private void addToCleanupList(FixUpInfo info) {
|
private void addToCleanupList(FixUpInfo info) {
|
||||||
long id = info.id;
|
long id = info.id;
|
||||||
int index = info.index;
|
int index = info.index;
|
||||||
LongObjectHashtable<DataType> ht = info.ht;
|
Map<Long, DataType> ht = info.ht;
|
||||||
CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(id);
|
CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(id);
|
||||||
if (cleanUpInfo == null) {
|
if (cleanUpInfo == null) {
|
||||||
cleanUpInfo = new CleanUpInfo(id);
|
cleanUpInfo = new CleanUpInfo(id);
|
||||||
|
@ -2685,7 +2686,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
private DataType resolve(long id, DataTypeManager dtm,
|
private DataType resolve(long id, DataTypeManager dtm,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
DataType dt = getResolvedComponent(id, resolvedDataTypes);
|
DataType dt = getResolvedComponent(id, resolvedDataTypes);
|
||||||
if (dt == null) {
|
if (dt == null) {
|
||||||
DataType otherDt = dtm.getDataType(id);
|
DataType otherDt = dtm.getDataType(id);
|
||||||
|
@ -2961,15 +2962,15 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
|
|
||||||
origDtConflictList = new ArrayList<>(dtConflictList);
|
origDtConflictList = new ArrayList<>(dtConflictList);
|
||||||
|
|
||||||
myResolvedDts = new LongObjectHashtable<>();
|
myResolvedDts = new HashMap<>();
|
||||||
latestResolvedDts = new LongObjectHashtable<>();
|
latestResolvedDts = new HashMap<>();
|
||||||
origResolvedDts = new LongObjectHashtable<>();
|
origResolvedDts = new HashMap<>();
|
||||||
|
|
||||||
fixUpList = new ArrayList<>();
|
fixUpList = new ArrayList<>();
|
||||||
fixUpIDSet = new HashSet<>();
|
fixUpIDSet = new HashSet<>();
|
||||||
totalConflictCount += dtConflictList.size();
|
totalConflictCount += dtConflictList.size();
|
||||||
|
|
||||||
cleanupPlaceHolderList = new LongObjectHashtable<>();
|
cleanupPlaceHolderList = new HashMap<>();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -3212,7 +3213,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
return new String[][] { DATA_TYPES_PHASE };
|
return new String[][] { DATA_TYPES_PHASE };
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataTypeManager getDataTypeManager(LongObjectHashtable<DataType> dataTypeMap) {
|
private DataTypeManager getDataTypeManager(Map<Long, DataType> dataTypeMap) {
|
||||||
if (dataTypeMap == origResolvedDts) {
|
if (dataTypeMap == origResolvedDts) {
|
||||||
return dtms[ORIGINAL];
|
return dtms[ORIGINAL];
|
||||||
}
|
}
|
||||||
|
@ -3238,7 +3239,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
long id;
|
long id;
|
||||||
long compID;
|
long compID;
|
||||||
int index;
|
int index;
|
||||||
LongObjectHashtable<DataType> ht;
|
Map<Long, DataType> ht;
|
||||||
|
|
||||||
// bitfield info
|
// bitfield info
|
||||||
int bitOffset = -1;
|
int bitOffset = -1;
|
||||||
|
@ -3255,7 +3256,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* @param resolvedDataTypes hashtable used for resolving the data type
|
* @param resolvedDataTypes hashtable used for resolving the data type
|
||||||
*/
|
*/
|
||||||
FixUpInfo(long id, long compID, int index,
|
FixUpInfo(long id, long compID, int index,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
this.id = id;
|
this.id = id;
|
||||||
this.compID = compID;
|
this.compID = compID;
|
||||||
this.index = index;
|
this.index = index;
|
||||||
|
@ -3271,7 +3272,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* @param resolvedDataTypes hashtable used for resolving the data type
|
* @param resolvedDataTypes hashtable used for resolving the data type
|
||||||
*/
|
*/
|
||||||
FixUpInfo(long id, long compID, DataTypeComponent sourceDtc,
|
FixUpInfo(long id, long compID, DataTypeComponent sourceDtc,
|
||||||
LongObjectHashtable<DataType> resolvedDataTypes) {
|
Map<Long, DataType> resolvedDataTypes) {
|
||||||
this(id, compID, getComponentFixupIndex(sourceDtc), resolvedDataTypes);
|
this(id, compID, getComponentFixupIndex(sourceDtc), resolvedDataTypes);
|
||||||
if (sourceDtc.isBitFieldComponent()) {
|
if (sourceDtc.isBitFieldComponent()) {
|
||||||
BitFieldDataType bfDt = (BitFieldDataType) sourceDtc.getDataType();
|
BitFieldDataType bfDt = (BitFieldDataType) sourceDtc.getDataType();
|
||||||
|
@ -3347,7 +3348,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
*/
|
*/
|
||||||
private class CleanUpInfo {
|
private class CleanUpInfo {
|
||||||
long id;
|
long id;
|
||||||
HashMap<LongObjectHashtable<DataType>, int[]> map; // resolvedDataTypesMap, indexArray
|
Map<Map<Long, DataType>, int[]> map; // resolvedDataTypesMap, indexArray
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Construct info needed to clean up place holder data types after base types
|
* Construct info needed to clean up place holder data types after base types
|
||||||
|
@ -3364,7 +3365,7 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
* structure; for other data types, offset is not used (specify -1)
|
* structure; for other data types, offset is not used (specify -1)
|
||||||
* @param resolvedDataTypes hashtable used for resolving the data type
|
* @param resolvedDataTypes hashtable used for resolving the data type
|
||||||
*/
|
*/
|
||||||
public void add(int index, LongObjectHashtable<DataType> resolvedDataTypes) {
|
public void add(int index, Map<Long, DataType> resolvedDataTypes) {
|
||||||
if (map == null) {
|
if (map == null) {
|
||||||
map = new HashMap<>();
|
map = new HashMap<>();
|
||||||
}
|
}
|
||||||
|
@ -3383,10 +3384,10 @@ public class DataTypeMergeManager implements MergeResolver {
|
||||||
if (map == null) {
|
if (map == null) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
Set<LongObjectHashtable<DataType>> keySet = map.keySet();
|
Set<Map<Long, DataType>> keySet = map.keySet();
|
||||||
Iterator<LongObjectHashtable<DataType>> iterator = keySet.iterator();
|
Iterator<Map<Long, DataType>> iterator = keySet.iterator();
|
||||||
while (iterator.hasNext()) {
|
while (iterator.hasNext()) {
|
||||||
LongObjectHashtable<DataType> ht = iterator.next();
|
Map<Long, DataType> ht = iterator.next();
|
||||||
DataType dt = ht.get(id);
|
DataType dt = ht.get(id);
|
||||||
if (dt instanceof Composite) {
|
if (dt instanceof Composite) {
|
||||||
int[] indexArray = map.get(ht);
|
int[] indexArray = map.get(ht);
|
||||||
|
|
|
@ -39,7 +39,6 @@ import ghidra.program.model.symbol.*;
|
||||||
import ghidra.program.util.DiffUtility;
|
import ghidra.program.util.DiffUtility;
|
||||||
import ghidra.program.util.ProgramMerge;
|
import ghidra.program.util.ProgramMerge;
|
||||||
import ghidra.util.*;
|
import ghidra.util.*;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.datastruct.ObjectIntHashtable;
|
import ghidra.util.datastruct.ObjectIntHashtable;
|
||||||
import ghidra.util.exception.*;
|
import ghidra.util.exception.*;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
@ -122,9 +121,9 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
|
||||||
|
|
||||||
protected AddressFactory resultAddressFactory;
|
protected AddressFactory resultAddressFactory;
|
||||||
|
|
||||||
protected LongObjectHashtable<DataType> latestResolvedDts; // maps data type ID -> resolved Data type
|
protected Map<Long, DataType> latestResolvedDts; // maps data type ID -> resolved Data type
|
||||||
protected LongObjectHashtable<DataType> myResolvedDts; // maps data type ID -> resolved Data type
|
protected Map<Long, DataType> myResolvedDts; // maps data type ID -> resolved Data type
|
||||||
protected LongObjectHashtable<DataType> origResolvedDts;
|
protected Map<Long, DataType> origResolvedDts;
|
||||||
|
|
||||||
// mergePanel is a panel for listing merge conflicts.
|
// mergePanel is a panel for listing merge conflicts.
|
||||||
// listings in CENTER, conflictInfoPanel in NORTH, mergeConflicts in SOUTH.
|
// listings in CENTER, conflictInfoPanel in NORTH, mergeConflicts in SOUTH.
|
||||||
|
@ -267,7 +266,8 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
|
||||||
// See if both changed to same value.
|
// See if both changed to same value.
|
||||||
switch (type) {
|
switch (type) {
|
||||||
case FUNC_RETURN_ADDRESS_OFFSET:
|
case FUNC_RETURN_ADDRESS_OFFSET:
|
||||||
return (latestStack.getReturnAddressOffset() == myStack.getReturnAddressOffset())
|
return (latestStack.getReturnAddressOffset() == myStack
|
||||||
|
.getReturnAddressOffset())
|
||||||
? 0
|
? 0
|
||||||
: type;
|
: type;
|
||||||
// For now, we are not allowing you to set the parameter offset or local size outright.
|
// For now, we are not allowing you to set the parameter offset or local size outright.
|
||||||
|
@ -277,7 +277,8 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
|
||||||
// case FUNC_LOCAL_SIZE:
|
// case FUNC_LOCAL_SIZE:
|
||||||
// return (latestStack.getLocalSize() == myStack.getLocalSize()) ? 0 : type;
|
// return (latestStack.getLocalSize() == myStack.getLocalSize()) ? 0 : type;
|
||||||
case FUNC_STACK_PURGE_SIZE:
|
case FUNC_STACK_PURGE_SIZE:
|
||||||
return (functions[LATEST].getStackPurgeSize() == functions[MY].getStackPurgeSize())
|
return (functions[LATEST].getStackPurgeSize() == functions[MY]
|
||||||
|
.getStackPurgeSize())
|
||||||
? 0
|
? 0
|
||||||
: type;
|
: type;
|
||||||
case FUNC_NAME:
|
case FUNC_NAME:
|
||||||
|
@ -292,10 +293,12 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
|
||||||
// return (functions[LATEST].hasCustomVariableStorage() == functions[MY].hasCustomVariableStorage()) ? 0
|
// return (functions[LATEST].hasCustomVariableStorage() == functions[MY].hasCustomVariableStorage()) ? 0
|
||||||
// : type;
|
// : type;
|
||||||
case FUNC_CALLING_CONVENTION:
|
case FUNC_CALLING_CONVENTION:
|
||||||
return (functions[LATEST].getCallingConventionName().equals(
|
return (functions[LATEST].getCallingConventionName()
|
||||||
|
.equals(
|
||||||
functions[MY].getCallingConventionName())) ? 0 : type;
|
functions[MY].getCallingConventionName())) ? 0 : type;
|
||||||
case FUNC_SIGNATURE_SOURCE:
|
case FUNC_SIGNATURE_SOURCE:
|
||||||
return (functions[LATEST].getSignatureSource() == functions[MY].getSignatureSource())
|
return (functions[LATEST].getSignatureSource() == functions[MY]
|
||||||
|
.getSignatureSource())
|
||||||
? 0
|
? 0
|
||||||
: type;
|
: type;
|
||||||
default:
|
default:
|
||||||
|
|
|
@ -17,6 +17,7 @@ package ghidra.app.merge.listing;
|
||||||
|
|
||||||
import java.lang.reflect.InvocationTargetException;
|
import java.lang.reflect.InvocationTargetException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
import javax.swing.SwingUtilities;
|
import javax.swing.SwingUtilities;
|
||||||
import javax.swing.event.ChangeEvent;
|
import javax.swing.event.ChangeEvent;
|
||||||
|
@ -35,7 +36,6 @@ import ghidra.program.model.mem.MemoryAccessException;
|
||||||
import ghidra.program.model.util.CodeUnitInsertionException;
|
import ghidra.program.model.util.CodeUnitInsertionException;
|
||||||
import ghidra.program.util.*;
|
import ghidra.program.util.*;
|
||||||
import ghidra.util.Msg;
|
import ghidra.util.Msg;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.CancelledException;
|
import ghidra.util.exception.CancelledException;
|
||||||
import ghidra.util.exception.NotYetImplementedException;
|
import ghidra.util.exception.NotYetImplementedException;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
@ -100,8 +100,8 @@ class CodeUnitMerger extends AbstractListingMerger {
|
||||||
ProgramMerge mergeLatest;
|
ProgramMerge mergeLatest;
|
||||||
ProgramMerge mergeOriginal;
|
ProgramMerge mergeOriginal;
|
||||||
|
|
||||||
private LongObjectHashtable<DataType> myResolvedDts; // maps data type ID -> resolved Data type
|
private Map<Long, DataType> myResolvedDts; // maps data type ID -> resolved Data type
|
||||||
private LongObjectHashtable<DataType> origResolvedDts;
|
private Map<Long, DataType> origResolvedDts;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Manages code unit changes and conflicts between the latest versioned
|
* Manages code unit changes and conflicts between the latest versioned
|
||||||
|
@ -147,9 +147,9 @@ class CodeUnitMerger extends AbstractListingMerger {
|
||||||
mergeLatest = listingMergeMgr.mergeLatest;
|
mergeLatest = listingMergeMgr.mergeLatest;
|
||||||
mergeOriginal = listingMergeMgr.mergeOriginal;
|
mergeOriginal = listingMergeMgr.mergeOriginal;
|
||||||
|
|
||||||
myResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
myResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_MY_DTS);
|
MergeConstants.RESOLVED_MY_DTS);
|
||||||
origResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
origResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_ORIGINAL_DTS);
|
MergeConstants.RESOLVED_ORIGINAL_DTS);
|
||||||
|
|
||||||
mergedCodeUnits = new AddressSet();
|
mergedCodeUnits = new AddressSet();
|
||||||
|
|
|
@ -36,7 +36,8 @@ import ghidra.program.model.mem.MemoryAccessException;
|
||||||
import ghidra.program.model.symbol.*;
|
import ghidra.program.model.symbol.*;
|
||||||
import ghidra.program.util.*;
|
import ghidra.program.util.*;
|
||||||
import ghidra.util.*;
|
import ghidra.util.*;
|
||||||
import ghidra.util.datastruct.*;
|
import ghidra.util.datastruct.LongLongHashtable;
|
||||||
|
import ghidra.util.datastruct.ObjectIntHashtable;
|
||||||
import ghidra.util.exception.*;
|
import ghidra.util.exception.*;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
|
@ -454,11 +455,11 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
|
||||||
throws ProgramConflictException, MemoryAccessException, CancelledException {
|
throws ProgramConflictException, MemoryAccessException, CancelledException {
|
||||||
|
|
||||||
if (mergeManager != null) {
|
if (mergeManager != null) {
|
||||||
latestResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
latestResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_LATEST_DTS);
|
MergeConstants.RESOLVED_LATEST_DTS);
|
||||||
myResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
myResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_MY_DTS);
|
MergeConstants.RESOLVED_MY_DTS);
|
||||||
origResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
origResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_ORIGINAL_DTS);
|
MergeConstants.RESOLVED_ORIGINAL_DTS);
|
||||||
|
|
||||||
latestResolvedSymbols = (LongLongHashtable) mergeManager.getResolveInformation(
|
latestResolvedSymbols = (LongLongHashtable) mergeManager.getResolveInformation(
|
||||||
|
@ -1816,7 +1817,8 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
|
||||||
}
|
}
|
||||||
if (originalImportedName != null) {
|
if (originalImportedName != null) {
|
||||||
try {
|
try {
|
||||||
resultExternalLocation.getSymbol().setNameAndNamespace(externalLocation.getLabel(),
|
resultExternalLocation.getSymbol()
|
||||||
|
.setNameAndNamespace(externalLocation.getLabel(),
|
||||||
resolvedNamespace, externalLocation.getSource());
|
resolvedNamespace, externalLocation.getSource());
|
||||||
}
|
}
|
||||||
catch (CircularDependencyException e) {
|
catch (CircularDependencyException e) {
|
||||||
|
@ -2141,13 +2143,15 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
|
||||||
}
|
}
|
||||||
// If we have a function variable storage choice then a "Use For All" has already occurred.
|
// If we have a function variable storage choice then a "Use For All" has already occurred.
|
||||||
if (variableStorageChoice != ASK_USER) {
|
if (variableStorageChoice != ASK_USER) {
|
||||||
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
|
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
|
||||||
|
.getOverlappingVariables()) {
|
||||||
monitor.checkCanceled();
|
monitor.checkCanceled();
|
||||||
mergeVariableStorage(functions, pair, variableStorageChoice, monitor);
|
mergeVariableStorage(functions, pair, variableStorageChoice, monitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (askUser && mergeManager != null) {
|
else if (askUser && mergeManager != null) {
|
||||||
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
|
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
|
||||||
|
.getOverlappingVariables()) {
|
||||||
monitor.checkCanceled();
|
monitor.checkCanceled();
|
||||||
boolean useForAll = (variableStorageChoice != ASK_USER);
|
boolean useForAll = (variableStorageChoice != ASK_USER);
|
||||||
if (useForAll) {
|
if (useForAll) {
|
||||||
|
@ -2164,7 +2168,8 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
|
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
|
||||||
|
.getOverlappingVariables()) {
|
||||||
monitor.checkCanceled();
|
monitor.checkCanceled();
|
||||||
mergeVariableStorage(functions, pair, currentConflictOption, monitor);
|
mergeVariableStorage(functions, pair, currentConflictOption, monitor);
|
||||||
}
|
}
|
||||||
|
|
|
@ -37,7 +37,6 @@ import ghidra.program.model.symbol.Namespace;
|
||||||
import ghidra.program.util.*;
|
import ghidra.program.util.*;
|
||||||
import ghidra.util.Msg;
|
import ghidra.util.Msg;
|
||||||
import ghidra.util.SystemUtilities;
|
import ghidra.util.SystemUtilities;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.*;
|
import ghidra.util.exception.*;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
|
@ -213,11 +212,11 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
|
||||||
public void autoMerge(int progressMin, int progressMax, TaskMonitor monitor)
|
public void autoMerge(int progressMin, int progressMax, TaskMonitor monitor)
|
||||||
throws ProgramConflictException, MemoryAccessException, CancelledException {
|
throws ProgramConflictException, MemoryAccessException, CancelledException {
|
||||||
|
|
||||||
latestResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
latestResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_LATEST_DTS);
|
MergeConstants.RESOLVED_LATEST_DTS);
|
||||||
myResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
myResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_MY_DTS);
|
MergeConstants.RESOLVED_MY_DTS);
|
||||||
origResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
|
origResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
|
||||||
MergeConstants.RESOLVED_ORIGINAL_DTS);
|
MergeConstants.RESOLVED_ORIGINAL_DTS);
|
||||||
|
|
||||||
initializeAutoMerge("Auto-merging Functions and determining conflicts.", progressMin,
|
initializeAutoMerge("Auto-merging Functions and determining conflicts.", progressMin,
|
||||||
|
@ -999,13 +998,15 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
|
||||||
}
|
}
|
||||||
// If we have a function variable storage choice then a "Use For All" has already occurred.
|
// If we have a function variable storage choice then a "Use For All" has already occurred.
|
||||||
if (variableStorageChoice != ASK_USER) {
|
if (variableStorageChoice != ASK_USER) {
|
||||||
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
|
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
|
||||||
|
.getOverlappingVariables()) {
|
||||||
monitor.checkCanceled();
|
monitor.checkCanceled();
|
||||||
mergeVariableStorage(addr, pair, variableStorageChoice, monitor);
|
mergeVariableStorage(addr, pair, variableStorageChoice, monitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (askUser && mergeManager != null) {
|
else if (askUser && mergeManager != null) {
|
||||||
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
|
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
|
||||||
|
.getOverlappingVariables()) {
|
||||||
monitor.checkCanceled();
|
monitor.checkCanceled();
|
||||||
boolean useForAll = (variableStorageChoice != ASK_USER);
|
boolean useForAll = (variableStorageChoice != ASK_USER);
|
||||||
if (useForAll) {
|
if (useForAll) {
|
||||||
|
@ -1022,7 +1023,8 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
|
for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
|
||||||
|
.getOverlappingVariables()) {
|
||||||
monitor.checkCanceled();
|
monitor.checkCanceled();
|
||||||
mergeVariableStorage(addr, pair, currentConflictOption, monitor);
|
mergeVariableStorage(addr, pair, currentConflictOption, monitor);
|
||||||
}
|
}
|
||||||
|
@ -1355,7 +1357,9 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
|
||||||
protected void mergeParameters(Address entryPtAddress, int chosenConflictOption,
|
protected void mergeParameters(Address entryPtAddress, int chosenConflictOption,
|
||||||
TaskMonitor monitor) {
|
TaskMonitor monitor) {
|
||||||
Function resultFunction =
|
Function resultFunction =
|
||||||
listingMergeManager.mergeLatest.getResultProgram().getFunctionManager().getFunctionAt(
|
listingMergeManager.mergeLatest.getResultProgram()
|
||||||
|
.getFunctionManager()
|
||||||
|
.getFunctionAt(
|
||||||
entryPtAddress);
|
entryPtAddress);
|
||||||
if (resultFunction == null) {
|
if (resultFunction == null) {
|
||||||
return;
|
return;
|
||||||
|
@ -1878,7 +1882,8 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
|
||||||
if (function1.getStackPurgeSize() != function2.getStackPurgeSize()) {
|
if (function1.getStackPurgeSize() != function2.getStackPurgeSize()) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (function1.getStackFrame().getReturnAddressOffset() != function2.getStackFrame().getReturnAddressOffset()) {
|
if (function1.getStackFrame().getReturnAddressOffset() != function2.getStackFrame()
|
||||||
|
.getReturnAddressOffset()) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (!function1.getCallingConventionName().equals(function2.getCallingConventionName())) {
|
if (!function1.getCallingConventionName().equals(function2.getCallingConventionName())) {
|
||||||
|
|
|
@ -15,16 +15,14 @@
|
||||||
*/
|
*/
|
||||||
package ghidra.app.plugin.match;
|
package ghidra.app.plugin.match;
|
||||||
|
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
import ghidra.program.model.address.Address;
|
import ghidra.program.model.address.Address;
|
||||||
import ghidra.program.model.address.AddressSetView;
|
import ghidra.program.model.address.AddressSetView;
|
||||||
import ghidra.program.model.listing.*;
|
import ghidra.program.model.listing.*;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.CancelledException;
|
import ghidra.util.exception.CancelledException;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This class does the work of matching subroutines. Every subroutine
|
* This class does the work of matching subroutines. Every subroutine
|
||||||
* in the current program is hashed and the start address is put into a
|
* in the current program is hashed and the start address is put into a
|
||||||
|
@ -44,12 +42,13 @@ public class MatchFunctions {
|
||||||
boolean includeOneToOne, boolean includeNonOneToOne, FunctionHasher hasher,
|
boolean includeOneToOne, boolean includeNonOneToOne, FunctionHasher hasher,
|
||||||
TaskMonitor monitor) throws CancelledException {
|
TaskMonitor monitor) throws CancelledException {
|
||||||
|
|
||||||
LongObjectHashtable<Match> functionHashes = new LongObjectHashtable<Match>();
|
Map<Long, Match> functionHashes = new HashMap<>();
|
||||||
List<MatchedFunctions> functionMatches = new ArrayList<MatchedFunctions>();
|
List<MatchedFunctions> functionMatches = new ArrayList<MatchedFunctions>();
|
||||||
FunctionIterator aProgfIter = aProgram.getFunctionManager().getFunctions(setA, true);
|
FunctionIterator aProgfIter = aProgram.getFunctionManager().getFunctions(setA, true);
|
||||||
FunctionIterator bProgfIter = bProgram.getFunctionManager().getFunctions(setB, true);
|
FunctionIterator bProgfIter = bProgram.getFunctionManager().getFunctions(setB, true);
|
||||||
monitor.setIndeterminate(false);
|
monitor.setIndeterminate(false);
|
||||||
monitor.initialize(2 * (aProgram.getFunctionManager().getFunctionCount() + bProgram.getFunctionManager().getFunctionCount()));
|
monitor.initialize(2 * (aProgram.getFunctionManager().getFunctionCount() +
|
||||||
|
bProgram.getFunctionManager().getFunctionCount()));
|
||||||
monitor.setMessage("Hashing functions in " + aProgram.getName());
|
monitor.setMessage("Hashing functions in " + aProgram.getName());
|
||||||
|
|
||||||
// Hash functions in program A
|
// Hash functions in program A
|
||||||
|
@ -73,17 +72,15 @@ public class MatchFunctions {
|
||||||
|
|
||||||
//Find the remaining hash matches ---> unique code match left and THERE is no symbol that matches
|
//Find the remaining hash matches ---> unique code match left and THERE is no symbol that matches
|
||||||
//in the other program.
|
//in the other program.
|
||||||
long[] keys = functionHashes.getKeys();
|
|
||||||
final long progress = monitor.getProgress();
|
final long progress = monitor.getProgress();
|
||||||
monitor.setMaximum(progress + keys.length);
|
monitor.setMaximum(progress + functionHashes.size());
|
||||||
monitor.setProgress(progress);
|
monitor.setProgress(progress);
|
||||||
monitor.setMessage("Finding function matches");
|
monitor.setMessage("Finding function matches");
|
||||||
for (int i = 0; i < keys.length; i++) {
|
for (Match match : functionHashes.values()) {
|
||||||
monitor.incrementProgress(1);
|
monitor.incrementProgress(1);
|
||||||
if (monitor.isCancelled()) {
|
if (monitor.isCancelled()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
Match match = functionHashes.get(keys[i]);
|
|
||||||
ArrayList<Address> aProgAddrs = match.aAddresses;
|
ArrayList<Address> aProgAddrs = match.aAddresses;
|
||||||
ArrayList<Address> bProgAddrs = match.bAddresses;
|
ArrayList<Address> bProgAddrs = match.bAddresses;
|
||||||
if ((includeOneToOne && aProgAddrs.size() == 1 && bProgAddrs.size() == 1) ||
|
if ((includeOneToOne && aProgAddrs.size() == 1 && bProgAddrs.size() == 1) ||
|
||||||
|
@ -103,15 +100,17 @@ public class MatchFunctions {
|
||||||
}
|
}
|
||||||
|
|
||||||
public static List<MatchedFunctions> matchOneFunction(Program aProgram, Address aEntryPoint,
|
public static List<MatchedFunctions> matchOneFunction(Program aProgram, Address aEntryPoint,
|
||||||
Program bProgram, FunctionHasher hasher, TaskMonitor monitor) throws CancelledException {
|
Program bProgram, FunctionHasher hasher, TaskMonitor monitor)
|
||||||
|
throws CancelledException {
|
||||||
return matchOneFunction(aProgram, aEntryPoint, bProgram, null, hasher, monitor);
|
return matchOneFunction(aProgram, aEntryPoint, bProgram, null, hasher, monitor);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Finds all matches in program B to the function in Program A
|
// Finds all matches in program B to the function in Program A
|
||||||
public static List<MatchedFunctions> matchOneFunction(Program aProgram, Address aEntryPoint,
|
public static List<MatchedFunctions> matchOneFunction(Program aProgram, Address aEntryPoint,
|
||||||
Program bProgram, AddressSetView bAddressSet, FunctionHasher hasher, TaskMonitor monitor)
|
Program bProgram, AddressSetView bAddressSet, FunctionHasher hasher,
|
||||||
|
TaskMonitor monitor)
|
||||||
throws CancelledException {
|
throws CancelledException {
|
||||||
LongObjectHashtable<Match> functionHashes = new LongObjectHashtable<Match>();
|
Map<Long, Match> functionHashes = new HashMap<>();
|
||||||
List<MatchedFunctions> functionMatches = new ArrayList<MatchedFunctions>();
|
List<MatchedFunctions> functionMatches = new ArrayList<MatchedFunctions>();
|
||||||
|
|
||||||
Function aFunc = aProgram.getFunctionManager().getFunctionContaining(aEntryPoint);
|
Function aFunc = aProgram.getFunctionManager().getFunctionContaining(aEntryPoint);
|
||||||
|
@ -131,12 +130,12 @@ public class MatchFunctions {
|
||||||
|
|
||||||
//Find the remaining hash matches ---> unique code match left and THERE is no symbol that matches
|
//Find the remaining hash matches ---> unique code match left and THERE is no symbol that matches
|
||||||
//in the other program.
|
//in the other program.
|
||||||
long[] keys = functionHashes.getKeys();
|
List<Long> keys = new ArrayList<>(functionHashes.keySet());
|
||||||
for (int i = 0; i < keys.length; i++) {
|
for (long key : keys) {
|
||||||
if (monitor.isCancelled()) {
|
if (monitor.isCancelled()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
Match match = functionHashes.get(keys[i]);
|
Match match = functionHashes.get(key);
|
||||||
ArrayList<Address> aProgAddrs = match.aAddresses;
|
ArrayList<Address> aProgAddrs = match.aAddresses;
|
||||||
ArrayList<Address> bProgAddrs = match.bAddresses;
|
ArrayList<Address> bProgAddrs = match.bAddresses;
|
||||||
|
|
||||||
|
@ -149,7 +148,7 @@ public class MatchFunctions {
|
||||||
"Code Only Match");
|
"Code Only Match");
|
||||||
functionMatches.add(functionMatch);
|
functionMatches.add(functionMatch);
|
||||||
}
|
}
|
||||||
functionHashes.remove(keys[i]);
|
functionHashes.remove(key);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -157,7 +156,7 @@ public class MatchFunctions {
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void hashFunction(TaskMonitor monitor,
|
private static void hashFunction(TaskMonitor monitor,
|
||||||
LongObjectHashtable<Match> functionHashes, Function function, FunctionHasher hasher,
|
Map<Long, Match> functionHashes, Function function, FunctionHasher hasher,
|
||||||
boolean isProgA) throws CancelledException {
|
boolean isProgA) throws CancelledException {
|
||||||
|
|
||||||
long hash = hasher.hash(function, monitor);
|
long hash = hasher.hash(function, monitor);
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
/* ###
|
/* ###
|
||||||
* IP: GHIDRA
|
* IP: GHIDRA
|
||||||
* REVIEWED: YES
|
|
||||||
*
|
*
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
* you may not use this file except in compliance with the License.
|
* you may not use this file except in compliance with the License.
|
||||||
|
@ -16,13 +15,13 @@
|
||||||
*/
|
*/
|
||||||
package ghidra.server.remote;
|
package ghidra.server.remote;
|
||||||
|
|
||||||
import ghidra.util.Msg;
|
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
import java.net.ServerSocket;
|
import java.net.ServerSocket;
|
||||||
import java.net.Socket;
|
import java.net.Socket;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
|
||||||
|
import ghidra.util.Msg;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* <code>RMIClassServer</code> provides a server for serializing classes to an
|
* <code>RMIClassServer</code> provides a server for serializing classes to an
|
||||||
* RMI client as needed. This implementation starts a new listener thread each
|
* RMI client as needed. This implementation starts a new listener thread each
|
||||||
|
|
|
@ -479,7 +479,7 @@ public class AssociationDatabaseManager implements VTAssociationManager {
|
||||||
|
|
||||||
if (status == ACCEPTED) {
|
if (status == ACCEPTED) {
|
||||||
associationDB.setStatus(AVAILABLE);
|
associationDB.setStatus(AVAILABLE);
|
||||||
associationCache.invalidate(associationDB.getKey());
|
associationDB.setInvalid();
|
||||||
unblockRelatedAssociations(associationDB);
|
unblockRelatedAssociations(associationDB);
|
||||||
for (AssociationHook hook : associationHooks) {
|
for (AssociationHook hook : associationHooks) {
|
||||||
hook.associationCleared(associationDB);
|
hook.associationCleared(associationDB);
|
||||||
|
@ -545,7 +545,7 @@ public class AssociationDatabaseManager implements VTAssociationManager {
|
||||||
case AVAILABLE:
|
case AVAILABLE:
|
||||||
throw new AssertException("Attempted to unblock a non-blocked association!");
|
throw new AssertException("Attempted to unblock a non-blocked association!");
|
||||||
case BLOCKED:
|
case BLOCKED:
|
||||||
associationCache.invalidate(relatedAssociation.getKey());
|
relatedAssociation.setInvalid();
|
||||||
relatedAssociation.setStatus(computeBlockedStatus(relatedAssociation));
|
relatedAssociation.setStatus(computeBlockedStatus(relatedAssociation));
|
||||||
break;
|
break;
|
||||||
case REJECTED:
|
case REJECTED:
|
||||||
|
|
|
@ -23,7 +23,6 @@ import org.jdom.input.SAXBuilder;
|
||||||
import org.jdom.output.XMLOutputter;
|
import org.jdom.output.XMLOutputter;
|
||||||
|
|
||||||
import ghidra.framework.store.*;
|
import ghidra.framework.store.*;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.xml.GenericXMLOutputter;
|
import ghidra.util.xml.GenericXMLOutputter;
|
||||||
import ghidra.util.xml.XmlUtilities;
|
import ghidra.util.xml.XmlUtilities;
|
||||||
|
|
||||||
|
@ -40,7 +39,7 @@ class CheckoutManager {
|
||||||
private long nextCheckoutId = 1;
|
private long nextCheckoutId = 1;
|
||||||
|
|
||||||
// checkouts maps long checkoutId to ItemCheckoutStatus objects
|
// checkouts maps long checkoutId to ItemCheckoutStatus objects
|
||||||
private LongObjectHashtable<ItemCheckoutStatus> checkouts;
|
private Map<Long, ItemCheckoutStatus> checkouts;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor.
|
* Constructor.
|
||||||
|
@ -53,7 +52,7 @@ class CheckoutManager {
|
||||||
CheckoutManager(LocalFolderItem item, boolean create) throws IOException {
|
CheckoutManager(LocalFolderItem item, boolean create) throws IOException {
|
||||||
this.item = item;
|
this.item = item;
|
||||||
if (create) {
|
if (create) {
|
||||||
checkouts = new LongObjectHashtable<ItemCheckoutStatus>();
|
checkouts = new HashMap<>();
|
||||||
writeCheckoutsFile();
|
writeCheckoutsFile();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -164,8 +163,7 @@ class CheckoutManager {
|
||||||
*/
|
*/
|
||||||
synchronized boolean isCheckedOut(int version) throws IOException {
|
synchronized boolean isCheckedOut(int version) throws IOException {
|
||||||
validate();
|
validate();
|
||||||
long[] ids = checkouts.getKeys();
|
for (long id : checkouts.keySet()) {
|
||||||
for (long id : ids) {
|
|
||||||
ItemCheckoutStatus coStatus = checkouts.get(id);
|
ItemCheckoutStatus coStatus = checkouts.get(id);
|
||||||
if (coStatus.getCheckoutVersion() == version) {
|
if (coStatus.getCheckoutVersion() == version) {
|
||||||
return true;
|
return true;
|
||||||
|
@ -199,13 +197,9 @@ class CheckoutManager {
|
||||||
*/
|
*/
|
||||||
synchronized ItemCheckoutStatus[] getAllCheckouts() throws IOException {
|
synchronized ItemCheckoutStatus[] getAllCheckouts() throws IOException {
|
||||||
validate();
|
validate();
|
||||||
long[] ids = checkouts.getKeys();
|
List<ItemCheckoutStatus> list = new ArrayList<>(checkouts.values());
|
||||||
Arrays.sort(ids);
|
Collections.sort(list, (a, b) -> (int) (a.getCheckoutId() - b.getCheckoutId()));
|
||||||
ItemCheckoutStatus[] list = new ItemCheckoutStatus[ids.length];
|
return list.toArray(new ItemCheckoutStatus[list.size()]);
|
||||||
for (int i = 0; i < ids.length; i++) {
|
|
||||||
list[i] = checkouts.get(ids[i]);
|
|
||||||
}
|
|
||||||
return list;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -219,7 +213,6 @@ class CheckoutManager {
|
||||||
checkouts = null;
|
checkouts = null;
|
||||||
}
|
}
|
||||||
if (checkouts == null) {
|
if (checkouts == null) {
|
||||||
LongObjectHashtable<ItemCheckoutStatus> oldCheckouts = checkouts;
|
|
||||||
long oldNextCheckoutId = nextCheckoutId;
|
long oldNextCheckoutId = nextCheckoutId;
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
try {
|
try {
|
||||||
|
@ -229,7 +222,7 @@ class CheckoutManager {
|
||||||
finally {
|
finally {
|
||||||
if (!success) {
|
if (!success) {
|
||||||
nextCheckoutId = oldNextCheckoutId;
|
nextCheckoutId = oldNextCheckoutId;
|
||||||
checkouts = oldCheckouts;
|
checkouts = null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -243,7 +236,7 @@ class CheckoutManager {
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
private void readCheckoutsFile() throws IOException {
|
private void readCheckoutsFile() throws IOException {
|
||||||
|
|
||||||
checkouts = new LongObjectHashtable<ItemCheckoutStatus>();
|
checkouts = new HashMap<>();
|
||||||
|
|
||||||
File checkoutsFile = getCheckoutsFile();
|
File checkoutsFile = getCheckoutsFile();
|
||||||
if (!checkoutsFile.exists()) {
|
if (!checkoutsFile.exists()) {
|
||||||
|
@ -317,8 +310,7 @@ class CheckoutManager {
|
||||||
Element root = new Element("CHECKOUT_LIST");
|
Element root = new Element("CHECKOUT_LIST");
|
||||||
root.setAttribute("NEXT_ID", Long.toString(nextCheckoutId));
|
root.setAttribute("NEXT_ID", Long.toString(nextCheckoutId));
|
||||||
|
|
||||||
long[] ids = checkouts.getKeys();
|
for (long id : checkouts.keySet()) {
|
||||||
for (long id : ids) {
|
|
||||||
ItemCheckoutStatus coStatus = checkouts.get(id);
|
ItemCheckoutStatus coStatus = checkouts.get(id);
|
||||||
// TRANSIENT checkout data must not be persisted - the existence
|
// TRANSIENT checkout data must not be persisted - the existence
|
||||||
// of such checkouts is retained in-memory only
|
// of such checkouts is retained in-memory only
|
||||||
|
|
|
@ -15,10 +15,9 @@
|
||||||
*/
|
*/
|
||||||
package ghidra.util.datastruct;
|
package ghidra.util.datastruct;
|
||||||
|
|
||||||
|
|
||||||
import java.lang.ref.ReferenceQueue;
|
import java.lang.ref.ReferenceQueue;
|
||||||
import java.lang.ref.WeakReference;
|
import java.lang.ref.WeakReference;
|
||||||
import java.util.LinkedList;
|
import java.util.*;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* <code>ObjectClass</code> provides a fixed-size long-key-based object cache.
|
* <code>ObjectClass</code> provides a fixed-size long-key-based object cache.
|
||||||
|
@ -31,7 +30,7 @@ import java.util.LinkedList;
|
||||||
*/
|
*/
|
||||||
public class ObjectCache {
|
public class ObjectCache {
|
||||||
|
|
||||||
private LongObjectHashtable<Object> hashTable;
|
private Map<Long, KeyedSoftReference<?>> hashTable;
|
||||||
private ReferenceQueue<Object> refQueue;
|
private ReferenceQueue<Object> refQueue;
|
||||||
private LinkedList<Object> hardCache;
|
private LinkedList<Object> hardCache;
|
||||||
private int hardCacheSize;
|
private int hardCacheSize;
|
||||||
|
@ -42,7 +41,7 @@ public class ObjectCache {
|
||||||
*/
|
*/
|
||||||
public ObjectCache(int hardCacheSize) {
|
public ObjectCache(int hardCacheSize) {
|
||||||
this.hardCacheSize = hardCacheSize;
|
this.hardCacheSize = hardCacheSize;
|
||||||
hashTable = new LongObjectHashtable<>();
|
hashTable = new HashMap<>();
|
||||||
refQueue = new ReferenceQueue<Object>();
|
refQueue = new ReferenceQueue<Object>();
|
||||||
hardCache = new LinkedList<Object>();
|
hardCache = new LinkedList<Object>();
|
||||||
}
|
}
|
||||||
|
@ -54,7 +53,7 @@ public class ObjectCache {
|
||||||
*/
|
*/
|
||||||
public synchronized boolean contains(long key) {
|
public synchronized boolean contains(long key) {
|
||||||
processQueue();
|
processQueue();
|
||||||
return hashTable.contains(key);
|
return hashTable.containsKey(key);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -63,7 +62,7 @@ public class ObjectCache {
|
||||||
* @return cached object
|
* @return cached object
|
||||||
*/
|
*/
|
||||||
public synchronized Object get(long key) {
|
public synchronized Object get(long key) {
|
||||||
WeakReference<?> ref = (WeakReference<?>)hashTable.get(key);
|
WeakReference<?> ref = hashTable.get(key);
|
||||||
if (ref != null) {
|
if (ref != null) {
|
||||||
Object obj = ref.get();
|
Object obj = ref.get();
|
||||||
if (obj == null) {
|
if (obj == null) {
|
||||||
|
@ -77,6 +76,7 @@ public class ObjectCache {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return the hard cache size
|
* Return the hard cache size
|
||||||
|
* @return the hard cache size
|
||||||
*/
|
*/
|
||||||
public int size() {
|
public int size() {
|
||||||
return hardCacheSize;
|
return hardCacheSize;
|
||||||
|
@ -111,13 +111,11 @@ public class ObjectCache {
|
||||||
*/
|
*/
|
||||||
public synchronized void clear() {
|
public synchronized void clear() {
|
||||||
processQueue();
|
processQueue();
|
||||||
long[] keys = hashTable.getKeys();
|
for (KeyedSoftReference<?> ref : hashTable.values()) {
|
||||||
for ( long element : keys ) {
|
|
||||||
KeyedSoftReference<?> ref = (KeyedSoftReference<?>)hashTable.get(element);
|
|
||||||
ref.clear();
|
ref.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
hashTable.removeAll();
|
hashTable.clear();
|
||||||
refQueue = new ReferenceQueue<Object>();
|
refQueue = new ReferenceQueue<Object>();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -134,12 +132,7 @@ public class ObjectCache {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
long[] keys = hashTable.getKeys();
|
hashTable.keySet().removeIf(key -> (key >= startKey && key <= endKey));
|
||||||
for ( long element : keys ) {
|
|
||||||
if (element >= startKey && element <= endKey) {
|
|
||||||
remove(element);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -150,7 +143,7 @@ public class ObjectCache {
|
||||||
*/
|
*/
|
||||||
public synchronized void remove(long key) {
|
public synchronized void remove(long key) {
|
||||||
processQueue();
|
processQueue();
|
||||||
KeyedSoftReference<?> ref = (KeyedSoftReference<?>)hashTable.get(key);
|
KeyedSoftReference<?> ref = hashTable.get(key);
|
||||||
if (ref != null) {
|
if (ref != null) {
|
||||||
ref.clear();
|
ref.clear();
|
||||||
hashTable.remove(key);
|
hashTable.remove(key);
|
||||||
|
@ -197,6 +190,7 @@ public class ObjectCache {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return object key
|
* Return object key
|
||||||
|
* @return object key
|
||||||
*/
|
*/
|
||||||
long getKey() {
|
long getKey() {
|
||||||
return key;
|
return key;
|
||||||
|
|
|
@ -1,163 +0,0 @@
|
||||||
/* ###
|
|
||||||
* IP: GHIDRA
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package ghidra.util.datastruct;
|
|
||||||
|
|
||||||
import java.lang.ref.ReferenceQueue;
|
|
||||||
import java.lang.ref.SoftReference;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Soft reference cache class that caches objects for long keys. This cache will
|
|
||||||
* store at most "cacheSize" number of entries, but since it uses soft references
|
|
||||||
* for the cached values, those object may be reclaimed.
|
|
||||||
*/
|
|
||||||
|
|
||||||
public class SoftCacheLongKeyMap {

	/** Maximum number of entries this cache will hold (minimum of 10 is enforced). */
	private final int cacheSize;

	/** Queue on which the GC reports reclaimed soft references; replaced on clear(). */
	private ReferenceQueue<Object> refQueue;

	/** Sentinel of the circular doubly-linked LRU list; head.nextEntry is least recently used. */
	private final Entry head;

	/** Maps a key to its list entry, which softly references the cached value. */
	private final Map<Long, Entry> map;

	/**
	 * Construct a new SoftCacheLongKeyMap that caches at most cacheSize number of entries
	 * @param cacheSize the max number of entries to cache; values below 10 are raised to 10.
	 */
	public SoftCacheLongKeyMap(int cacheSize) {
		this.cacheSize = Math.max(cacheSize, 10);
		map = new HashMap<>();
		head = new Entry(0, null);
		head.nextEntry = head;
		head.prevEntry = head;
		refQueue = new ReferenceQueue<>();
	}

	/**
	 * Caches the given value for the given key
	 * @param key the key
	 * @param value the cached value for the given key
	 * @return any previous value that was cached for the given key, or null if none
	 * (or if it was already reclaimed by the garbage collector).
	 */
	public Object put(long key, Object value) {
		processQueue();
		Entry oldEntry = map.remove(key);
		if (oldEntry != null) {
			// unlink the replaced entry so the LRU list stays consistent with the map
			oldEntry.delete();
		}
		else if (map.size() == cacheSize) {
			// adding a brand new key at capacity: evict the least recently used entry
			remove(head.nextEntry.key);
		}
		Entry entry = new Entry(key, value);
		head.addBefore(entry); // insert at the most-recently-used end
		map.put(key, entry);
		return oldEntry != null ? oldEntry.get() : null;
	}

	/**
	 * Returns the cached value for the given key, if it exists.
	 * @param key the key for which to get a cached value.
	 * @return the object that was cached for that key, or null if none exists.
	 */
	public Object get(long key) {
		processQueue();
		Entry entry = map.get(key);
		if (entry != null) {
			// move the entry to the most-recently-used position
			entry.delete();
			head.addBefore(entry);
			return entry.get();
		}
		return null;
	}

	/**
	 * Returns the number of items in the cache. Can change from one call to
	 * the next even if no entries were added or deleted, since the garbage
	 * collector may reclaim softly referenced values at any time.
	 */
	public int size() {
		processQueue();
		return map.size();
	}

	/**
	 * Removes all entries from the cache
	 */
	public void clear() {
		map.clear();
		// reset the LRU list; otherwise stale entries would linger and break eviction
		head.nextEntry = head;
		head.prevEntry = head;
		refQueue = new ReferenceQueue<>();
	}

	/**
	 * Returns true if the cache is empty. If true, it will remain empty until a new
	 * entry is added. However if false, it may return true later even if nothing was
	 * explicitly removed, because values may be reclaimed by the garbage collector.
	 */
	public boolean isEmpty() {
		processQueue();
		return map.isEmpty();
	}

	/**
	 * Returns true if the cache currently contains the given key. Not very useful since
	 * even if it returns true, there is no guarantee that a subsequent get will succeed
	 * (the value may be reclaimed in between).
	 * @param key the key to check
	 */
	public boolean containsKey(long key) {
		processQueue();
		return map.containsKey(key);
	}

	/**
	 * Removes any cached value for the given key.
	 * @param key the key for which to remove cached values.
	 * @return the cached object that was stored for the given key, or null
	 */
	public Object remove(long key) {
		Entry entry = map.remove(key);
		if (entry != null) {
			entry.delete();
			return entry.get();
		}
		return null;
	}

	/**
	 * Returns a list of all current keys.
	 */
	public long[] getKeys() {
		processQueue();
		long[] keys = new long[map.size()];
		int i = 0;
		for (long key : map.keySet()) {
			keys[i++] = key;
		}
		return keys;
	}

	/**
	 * Drops entries whose values have been reclaimed by the garbage collector.
	 */
	private void processQueue() {
		Entry entry;
		while ((entry = (Entry) refQueue.poll()) != null) {
			remove(entry.key);
		}
	}

	/**
	 * Node in the circular LRU list; softly references the cached value so the
	 * garbage collector may reclaim it under memory pressure.
	 */
	class Entry extends SoftReference<Object> {
		long key;
		Entry nextEntry;
		Entry prevEntry;

		Entry(long key, Object value) {
			super(value, refQueue);
			this.key = key;
		}

		/** Inserts the given entry immediately before this one in the list. */
		void addBefore(Entry entry) {
			entry.nextEntry = this;
			entry.prevEntry = this.prevEntry;
			this.prevEntry.nextEntry = entry;
			this.prevEntry = entry;
		}

		/** Unlinks this entry from the list. */
		void delete() {
			prevEntry.nextEntry = nextEntry;
			nextEntry.prevEntry = prevEntry;
		}
	}

}
|
|
|
@ -15,8 +15,9 @@
|
||||||
*/
|
*/
|
||||||
package ghidra.util.graph.attributes;
|
package ghidra.util.graph.attributes;
|
||||||
|
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
import java.util.HashMap;
|
||||||
import ghidra.util.exception.NoValueException;
|
import java.util.Map;
|
||||||
|
|
||||||
import ghidra.util.graph.KeyIndexableSet;
|
import ghidra.util.graph.KeyIndexableSet;
|
||||||
import ghidra.util.graph.KeyedObject;
|
import ghidra.util.graph.KeyedObject;
|
||||||
|
|
||||||
|
@ -25,7 +26,7 @@ import ghidra.util.graph.KeyedObject;
|
||||||
*/
|
*/
|
||||||
public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
|
public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
//private Object[] values;
|
//private Object[] values;
|
||||||
private LongObjectHashtable<Object> values;
|
private Map<Long, Object> values;
|
||||||
private static String attributeType = AttributeManager.OBJECT_TYPE;
|
private static String attributeType = AttributeManager.OBJECT_TYPE;
|
||||||
|
|
||||||
/** Constructor.
|
/** Constructor.
|
||||||
|
@ -36,7 +37,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
public ObjectAttribute(String name, KeyIndexableSet<T> set) {
|
public ObjectAttribute(String name, KeyIndexableSet<T> set) {
|
||||||
super(name, set);
|
super(name, set);
|
||||||
//this.values = new Object[set.capacity()];
|
//this.values = new Object[set.capacity()];
|
||||||
values = new LongObjectHashtable<Object>();
|
values = new HashMap<>();
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Set the value of this attribute for the specified KeyedObject.
|
/** Set the value of this attribute for the specified KeyedObject.
|
||||||
|
@ -93,7 +94,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
/** Removes all assigned values of this attribute. */
|
/** Removes all assigned values of this attribute. */
|
||||||
@Override
|
@Override
|
||||||
public void clear() {
|
public void clear() {
|
||||||
values.removeAll();
|
values.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Return the attribute of the specified KeyedObject as a String.
|
/** Return the attribute of the specified KeyedObject as a String.
|
||||||
|
@ -101,7 +102,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
@Override
|
@Override
|
||||||
public String getValueAsString(KeyedObject o) {
|
public String getValueAsString(KeyedObject o) {
|
||||||
Object v;
|
Object v;
|
||||||
if (values.contains(o.key())) {
|
if (values.containsKey(o.key())) {
|
||||||
v = getValue(o);
|
v = getValue(o);
|
||||||
if (v != null) {
|
if (v != null) {
|
||||||
return v.toString();
|
return v.toString();
|
||||||
|
|
|
@ -15,20 +15,17 @@
|
||||||
*/
|
*/
|
||||||
package ghidra.util.graph.attributes;
|
package ghidra.util.graph.attributes;
|
||||||
|
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
import java.util.*;
|
||||||
import ghidra.util.exception.NoValueException;
|
|
||||||
import ghidra.util.graph.KeyIndexableSet;
|
import ghidra.util.graph.KeyIndexableSet;
|
||||||
import ghidra.util.graph.KeyedObject;
|
import ghidra.util.graph.KeyedObject;
|
||||||
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Comparator;
|
|
||||||
|
|
||||||
/** This class provides a storage mechanism for String-valued information about
|
/** This class provides a storage mechanism for String-valued information about
|
||||||
* the elements of a KeyIndexableSet, e.g. the vertices of a DirectedGraph.
|
* the elements of a KeyIndexableSet, e.g. the vertices of a DirectedGraph.
|
||||||
*/
|
*/
|
||||||
public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
//private String[] values;
|
//private String[] values;
|
||||||
private ghidra.util.datastruct.LongObjectHashtable values;
|
private Map<Long, String> values;
|
||||||
private static String attributeType = AttributeManager.STRING_TYPE;
|
private static String attributeType = AttributeManager.STRING_TYPE;
|
||||||
|
|
||||||
/** Constructor.
|
/** Constructor.
|
||||||
|
@ -38,7 +35,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
*/
|
*/
|
||||||
public StringAttribute(String name, KeyIndexableSet<T> set) {
|
public StringAttribute(String name, KeyIndexableSet<T> set) {
|
||||||
super(name, set);
|
super(name, set);
|
||||||
this.values = new LongObjectHashtable(set.capacity());// String[set.capacity()];
|
this.values = new HashMap<>(set.capacity());// String[set.capacity()];
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Set the value of this attribute for the specified KeyedObject.
|
/** Set the value of this attribute for the specified KeyedObject.
|
||||||
|
@ -49,8 +46,9 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
* not a member of the owningSet.
|
* not a member of the owningSet.
|
||||||
*/
|
*/
|
||||||
public boolean setValue(T o, String value) {
|
public boolean setValue(T o, String value) {
|
||||||
if (value == null)
|
if (value == null) {
|
||||||
return false;
|
return false;
|
||||||
|
}
|
||||||
if (owningSet().contains(o)) {
|
if (owningSet().contains(o)) {
|
||||||
//values[ owningSet().index( o ) ] = value;
|
//values[ owningSet().index( o ) ] = value;
|
||||||
values.put(o.key(), value);
|
values.put(o.key(), value);
|
||||||
|
@ -66,7 +64,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
public String getValue(KeyedObject o) //throws NoValueException
|
public String getValue(KeyedObject o) //throws NoValueException
|
||||||
{
|
{
|
||||||
//return values[ owningSet().index( o ) ];
|
//return values[ owningSet().index( o ) ];
|
||||||
return (String) values.get(o.key());
|
return values.get(o.key());
|
||||||
}
|
}
|
||||||
|
|
||||||
// /** Debug printing. */
|
// /** Debug printing. */
|
||||||
|
@ -129,9 +127,10 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
else if ((ko1.key() - ko2.key()) > 0) {
|
else if ((ko1.key() - ko2.key()) > 0) {
|
||||||
return +1;
|
return +1;
|
||||||
}
|
}
|
||||||
else
|
else {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
//ko1 is ok, ko2 fails.
|
//ko1 is ok, ko2 fails.
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
@ -144,10 +143,11 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
else if ((ko1.key() - ko2.key()) > 0) {
|
else if ((ko1.key() - ko2.key()) > 0) {
|
||||||
return +1;
|
return +1;
|
||||||
}
|
}
|
||||||
else
|
else {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/** Return the type of Attribute, i.e. what kind of values does
|
/** Return the type of Attribute, i.e. what kind of values does
|
||||||
* this attribute hold. "Long", "Object", "Double" are examples.
|
* this attribute hold. "Long", "Object", "Double" are examples.
|
||||||
|
@ -160,7 +160,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||||
/** Removes all assigned values of this attribute. */
|
/** Removes all assigned values of this attribute. */
|
||||||
@Override
|
@Override
|
||||||
public void clear() {
|
public void clear() {
|
||||||
values.removeAll();
|
values.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Return the attribute of the specified KeyedObject as a String.
|
/** Return the attribute of the specified KeyedObject as a String.
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
/* ###
|
/* ###
|
||||||
* IP: GHIDRA
|
* IP: GHIDRA
|
||||||
* REVIEWED: YES
|
|
||||||
*
|
*
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
* you may not use this file except in compliance with the License.
|
* you may not use this file except in compliance with the License.
|
||||||
|
@ -16,14 +15,15 @@
|
||||||
*/
|
*/
|
||||||
package ghidra.util.prop;
|
package ghidra.util.prop;
|
||||||
|
|
||||||
|
import java.io.*;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
import ghidra.util.LongIterator;
|
import ghidra.util.LongIterator;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.datastruct.NoSuchIndexException;
|
import ghidra.util.datastruct.NoSuchIndexException;
|
||||||
import ghidra.util.exception.AssertException;
|
import ghidra.util.exception.AssertException;
|
||||||
import ghidra.util.exception.NoValueException;
|
import ghidra.util.exception.NoValueException;
|
||||||
|
|
||||||
import java.io.*;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Base class for managing properties that are accessed by an index. Property
|
* Base class for managing properties that are accessed by an index. Property
|
||||||
* values are determined by the derived class.
|
* values are determined by the derived class.
|
||||||
|
@ -43,10 +43,9 @@ public abstract class PropertySet implements Serializable {
|
||||||
// the bit is part of the offset
|
// the bit is part of the offset
|
||||||
protected short pageSize; // max elements in each page
|
protected short pageSize; // max elements in each page
|
||||||
protected int numProperties;
|
protected int numProperties;
|
||||||
private LongObjectHashtable<PropertyPage> ht;
|
private Map<Long, PropertyPage> ht;
|
||||||
private Class<?> objectClass;
|
private Class<?> objectClass;
|
||||||
|
|
||||||
|
|
||||||
protected PropertySet(String name, Class<?> objectClass) {
|
protected PropertySet(String name, Class<?> objectClass) {
|
||||||
this(name, DEFAULT_NUMBER_PAGE_BITS, objectClass);
|
this(name, DEFAULT_NUMBER_PAGE_BITS, objectClass);
|
||||||
}
|
}
|
||||||
|
@ -60,7 +59,7 @@ public abstract class PropertySet implements Serializable {
|
||||||
*/
|
*/
|
||||||
protected PropertySet(String name, int numPageBits, Class<?> objectClass) {
|
protected PropertySet(String name, int numPageBits, Class<?> objectClass) {
|
||||||
this.objectClass = objectClass;
|
this.objectClass = objectClass;
|
||||||
ht = new LongObjectHashtable<PropertyPage>();
|
ht = new HashMap<>();
|
||||||
|
|
||||||
this.name = name;
|
this.name = name;
|
||||||
if (numPageBits > MAX_NUMBER_PAGE_BITS) {
|
if (numPageBits > MAX_NUMBER_PAGE_BITS) {
|
||||||
|
@ -138,6 +137,7 @@ public abstract class PropertySet implements Serializable {
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Removes all property values within a given range.
|
* Removes all property values within a given range.
|
||||||
* @param start begin range
|
* @param start begin range
|
||||||
|
@ -233,6 +233,7 @@ public abstract class PropertySet implements Serializable {
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* returns whether there is a property value at index.
|
* returns whether there is a property value at index.
|
||||||
* @param index the long representation of an address.
|
* @param index the long representation of an address.
|
||||||
|
@ -319,6 +320,7 @@ public abstract class PropertySet implements Serializable {
|
||||||
|
|
||||||
throw NoSuchIndexException.noSuchIndexException;
|
throw NoSuchIndexException.noSuchIndexException;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the first index where a property value exists.
|
* Get the first index where a property value exists.
|
||||||
* @throws NoSuchIndexException when there is no property value for any index.
|
* @throws NoSuchIndexException when there is no property value for any index.
|
||||||
|
@ -342,6 +344,7 @@ public abstract class PropertySet implements Serializable {
|
||||||
}
|
}
|
||||||
return getPreviousPropertyIndex(-1);
|
return getPreviousPropertyIndex(-1);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the number of properties in the set.
|
* Get the number of properties in the set.
|
||||||
* @return the number of properties
|
* @return the number of properties
|
||||||
|
@ -420,8 +423,10 @@ public abstract class PropertySet implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
protected abstract void moveIndex(long from, long to);
|
protected abstract void moveIndex(long from, long to);
|
||||||
|
|
||||||
protected abstract void saveProperty(ObjectOutputStream oos, long addr)
|
protected abstract void saveProperty(ObjectOutputStream oos, long addr)
|
||||||
throws IOException;
|
throws IOException;
|
||||||
|
|
||||||
protected abstract void restoreProperty(ObjectInputStream ois, long addr)
|
protected abstract void restoreProperty(ObjectInputStream ois, long addr)
|
||||||
throws IOException, ClassNotFoundException;
|
throws IOException, ClassNotFoundException;
|
||||||
|
|
||||||
|
@ -451,6 +456,7 @@ public abstract class PropertySet implements Serializable {
|
||||||
public LongIterator getPropertyIterator(long start, long end, boolean atStart) {
|
public LongIterator getPropertyIterator(long start, long end, boolean atStart) {
|
||||||
return new LongIteratorImpl(this, start, end, atStart);
|
return new LongIteratorImpl(this, start, end, atStart);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns an iterator over the indices having the given property
|
* Returns an iterator over the indices having the given property
|
||||||
* value.
|
* value.
|
||||||
|
@ -478,7 +484,6 @@ public abstract class PropertySet implements Serializable {
|
||||||
return new LongIteratorImpl(this, start, before);
|
return new LongIteratorImpl(this, start, before);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Saves all property values between start and end to the output stream
|
* Saves all property values between start and end to the output stream
|
||||||
* @param oos the output stream
|
* @param oos the output stream
|
||||||
|
@ -504,7 +509,8 @@ public abstract class PropertySet implements Serializable {
|
||||||
saveProperty(oos, index);
|
saveProperty(oos, index);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
catch(NoSuchIndexException e) {}
|
catch (NoSuchIndexException e) {
|
||||||
|
}
|
||||||
oos.writeByte(0);
|
oos.writeByte(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -516,8 +522,8 @@ public abstract class PropertySet implements Serializable {
|
||||||
* @throws ClassNotFoundException if the a class cannot be determined for
|
* @throws ClassNotFoundException if the a class cannot be determined for
|
||||||
* the property value.
|
* the property value.
|
||||||
*/
|
*/
|
||||||
public void restoreProperties(ObjectInputStream ois) throws
|
public void restoreProperties(ObjectInputStream ois)
|
||||||
IOException, ClassNotFoundException {
|
throws IOException, ClassNotFoundException {
|
||||||
long start = ois.readLong();
|
long start = ois.readLong();
|
||||||
long end = ois.readLong();
|
long end = ois.readLong();
|
||||||
this.removeRange(start, end);
|
this.removeRange(start, end);
|
||||||
|
@ -528,40 +534,6 @@ public abstract class PropertySet implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Saves all properties to the given output stream.
|
|
||||||
* @param out the output stream.
|
|
||||||
* @throws IOException I/O error occurs while writing output.
|
|
||||||
*/
|
|
||||||
public void saveAll(ObjectOutputStream out) throws IOException {
|
|
||||||
out.writeObject(name);
|
|
||||||
out.writeObject(propertyPageIndex);
|
|
||||||
out.writeInt(numPageBits);
|
|
||||||
out.writeLong(pageMask);
|
|
||||||
out.writeShort(pageSize);
|
|
||||||
out.writeInt(numProperties);
|
|
||||||
out.writeObject(ht);
|
|
||||||
out.writeObject(objectClass);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Restores all properties values from the input stream.
|
|
||||||
* @param in the input stream.
|
|
||||||
* @throws IOException if I/O error occurs while reading from stream.
|
|
||||||
* @throws ClassNotFoundException if the a class cannot be determined for
|
|
||||||
* the property value.
|
|
||||||
*/
|
|
||||||
@SuppressWarnings("unchecked") // the type must match or it is a bug
|
|
||||||
public void restoreAll(ObjectInputStream in) throws IOException, ClassNotFoundException {
|
|
||||||
name = (String)in.readObject();
|
|
||||||
propertyPageIndex = (PropertyPageIndex)in.readObject();
|
|
||||||
numPageBits = in.readInt();
|
|
||||||
pageMask = in.readLong();
|
|
||||||
pageSize = in.readShort();
|
|
||||||
numProperties = in.readInt();
|
|
||||||
ht = (LongObjectHashtable<PropertyPage>) in.readObject();
|
|
||||||
objectClass = (Class<?>)in.readObject();
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Based upon the type of property manager that this is, the appropriate
|
* Based upon the type of property manager that this is, the appropriate
|
||||||
* visit() method will be called within the PropertyVisitor.
|
* visit() method will be called within the PropertyVisitor.
|
||||||
* @param visitor object implementing the PropertyVisitor interface.
|
* @param visitor object implementing the PropertyVisitor interface.
|
||||||
|
|
|
@ -1,108 +0,0 @@
|
||||||
/* ###
|
|
||||||
* IP: GHIDRA
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package ghidra.util.datastruct;
|
|
||||||
|
|
||||||
import static org.junit.Assert.assertEquals;
|
|
||||||
import static org.junit.Assert.assertNull;
|
|
||||||
|
|
||||||
import org.junit.Test;
|
|
||||||
|
|
||||||
import generic.test.AbstractGenericTest;
|
|
||||||
|
|
||||||
public class SoftCacheLongKeyMapTest extends AbstractGenericTest {
|
|
||||||
SoftCacheLongKeyMap cache;
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
public SoftCacheLongKeyMapTest() {
|
|
||||||
super();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testMap() {
|
|
||||||
cache = new SoftCacheLongKeyMap(20);
|
|
||||||
cache.put(0, "aaa");
|
|
||||||
cache.put(1, "bbb");
|
|
||||||
cache.put(2, "ccc");
|
|
||||||
|
|
||||||
assertEquals(3, cache.size());
|
|
||||||
assertEquals("aaa", cache.get(0));
|
|
||||||
assertEquals("bbb", cache.get(1));
|
|
||||||
assertEquals("ccc", cache.get(2));
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testlru() {
|
|
||||||
cache = new SoftCacheLongKeyMap(10);
|
|
||||||
cache.put(0, "aaa");
|
|
||||||
cache.put(1, "bbb");
|
|
||||||
cache.put(2, "ccc");
|
|
||||||
cache.put(3, "ddd");
|
|
||||||
cache.put(4, "eee");
|
|
||||||
cache.put(5, "fff");
|
|
||||||
cache.put(6, "ggg");
|
|
||||||
cache.put(7, "hhh");
|
|
||||||
cache.put(8, "iii");
|
|
||||||
cache.put(9, "jjj");
|
|
||||||
|
|
||||||
assertEquals(10, cache.size());
|
|
||||||
cache.put(10, "kkk");
|
|
||||||
assertEquals(10, cache.size());
|
|
||||||
assertNull(cache.get(0));
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testlru2() {
|
|
||||||
cache = new SoftCacheLongKeyMap(10);
|
|
||||||
cache.put(0, "aaa");
|
|
||||||
cache.put(1, "bbb");
|
|
||||||
cache.put(2, "ccc");
|
|
||||||
cache.put(3, "ddd");
|
|
||||||
cache.put(4, "eee");
|
|
||||||
cache.put(5, "fff");
|
|
||||||
cache.put(6, "ggg");
|
|
||||||
cache.put(7, "hhh");
|
|
||||||
cache.put(8, "iii");
|
|
||||||
cache.put(9, "jjj");
|
|
||||||
cache.get(0);
|
|
||||||
assertEquals(10, cache.size());
|
|
||||||
cache.put(10, "kkk");
|
|
||||||
assertEquals(10, cache.size());
|
|
||||||
assertEquals("aaa", cache.get(0));
|
|
||||||
assertNull(cache.get(1));
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testRemove() {
|
|
||||||
cache = new SoftCacheLongKeyMap(10);
|
|
||||||
cache.put(0, "aaa");
|
|
||||||
cache.put(1, "bbb");
|
|
||||||
cache.put(2, "ccc");
|
|
||||||
cache.put(3, "ddd");
|
|
||||||
cache.remove(1);
|
|
||||||
cache.remove(0);
|
|
||||||
cache.remove(3);
|
|
||||||
cache.remove(2);
|
|
||||||
assertEquals(0, cache.size());
|
|
||||||
cache.put(5, "zzz");
|
|
||||||
assertEquals(1, cache.size());
|
|
||||||
cache.remove(5);
|
|
||||||
assertEquals(0, cache.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -21,7 +21,6 @@ package ghidra.program.database;
|
||||||
import java.lang.ref.ReferenceQueue;
|
import java.lang.ref.ReferenceQueue;
|
||||||
import java.lang.ref.WeakReference;
|
import java.lang.ref.WeakReference;
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
import db.Record;
|
import db.Record;
|
||||||
import ghidra.program.model.address.KeyRange;
|
import ghidra.program.model.address.KeyRange;
|
||||||
|
@ -199,18 +198,21 @@ public class DBObjectCache<T extends DatabaseObject> {
|
||||||
* @param keyRanges key ranges to delete
|
* @param keyRanges key ranges to delete
|
||||||
*/
|
*/
|
||||||
private void deleteLargeKeyRanges(List<KeyRange> keyRanges) {
|
private void deleteLargeKeyRanges(List<KeyRange> keyRanges) {
|
||||||
map.keySet()
|
map.values().removeIf(ref -> checkRef(ref, keyRanges));
|
||||||
.stream()
|
}
|
||||||
.filter(key -> keyRangesContain(keyRanges, key))
|
|
||||||
.collect(Collectors.toList())
|
private boolean checkRef(KeyedSoftReference ref, List<KeyRange> keyRanges) {
|
||||||
.forEach(key -> {
|
long key = ref.getKey();
|
||||||
KeyedSoftReference ref = map.remove(key);
|
if (keyRangesContain(keyRanges, key)) {
|
||||||
DatabaseObject obj = ref.get();
|
DatabaseObject obj = ref.get();
|
||||||
if (obj != null) {
|
if (obj != null) {
|
||||||
obj.setDeleted();
|
obj.setDeleted();
|
||||||
ref.clear();
|
ref.clear();
|
||||||
}
|
}
|
||||||
});
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -267,27 +269,6 @@ public class DBObjectCache<T extends DatabaseObject> {
|
||||||
return invalidateCount;
|
return invalidateCount;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Invalidates a range of objects in the cache.
|
|
||||||
* @param startKey the first key in the range to invalidate.
|
|
||||||
* @param endKey the last key in the range to invalidate.
|
|
||||||
*/
|
|
||||||
public synchronized void invalidate(long startKey, long endKey) {
|
|
||||||
processQueue();
|
|
||||||
if (endKey - startKey < map.size()) {
|
|
||||||
for (long i = startKey; i <= endKey; i++) {
|
|
||||||
doInvalidate(i);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
map.keySet()
|
|
||||||
.stream()
|
|
||||||
.filter(key -> (key >= startKey && key <= endKey))
|
|
||||||
.collect(Collectors.toList())
|
|
||||||
.forEach(key -> doInvalidate(key));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Removes the object with the given key from the cache.
|
* Removes the object with the given key from the cache.
|
||||||
* @param key the key of the object to remove.
|
* @param key the key of the object to remove.
|
||||||
|
@ -305,25 +286,6 @@ public class DBObjectCache<T extends DatabaseObject> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Invalidates the object with given key.
|
|
||||||
* @param key the key of the object to invalidate.
|
|
||||||
*/
|
|
||||||
public synchronized void invalidate(long key) {
|
|
||||||
processQueue();
|
|
||||||
doInvalidate(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void doInvalidate(long key) {
|
|
||||||
KeyedSoftReference ref = map.get(key);
|
|
||||||
if (ref != null) {
|
|
||||||
T obj = ref.get();
|
|
||||||
if (obj != null) {
|
|
||||||
obj.setInvalid();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void addToHardCache(T obj) {
|
private void addToHardCache(T obj) {
|
||||||
hardCache.addLast(obj);
|
hardCache.addLast(obj);
|
||||||
if (hardCache.size() > hardCacheSize) {
|
if (hardCache.size() > hardCacheSize) {
|
||||||
|
|
|
@ -16,13 +16,14 @@
|
||||||
package ghidra.program.database;
|
package ghidra.program.database;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
import db.*;
|
import db.*;
|
||||||
import ghidra.program.model.address.AddressSpace;
|
import ghidra.program.model.address.AddressSpace;
|
||||||
import ghidra.program.model.address.OverlayAddressSpace;
|
import ghidra.program.model.address.OverlayAddressSpace;
|
||||||
import ghidra.program.model.lang.Language;
|
import ghidra.program.model.lang.Language;
|
||||||
import ghidra.program.util.LanguageTranslator;
|
import ghidra.program.util.LanguageTranslator;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.AssertException;
|
import ghidra.util.exception.AssertException;
|
||||||
import ghidra.util.exception.DuplicateNameException;
|
import ghidra.util.exception.DuplicateNameException;
|
||||||
|
|
||||||
|
@ -115,7 +116,7 @@ class OverlaySpaceAdapterDB {
|
||||||
}
|
}
|
||||||
|
|
||||||
void updateOverlaySpaces(ProgramAddressFactory factory) throws IOException {
|
void updateOverlaySpaces(ProgramAddressFactory factory) throws IOException {
|
||||||
LongObjectHashtable<OverlayAddressSpace> map = new LongObjectHashtable<>();
|
Map<Long, OverlayAddressSpace> map = new HashMap<>();
|
||||||
for (AddressSpace space : factory.getAllAddressSpaces()) {
|
for (AddressSpace space : factory.getAllAddressSpaces()) {
|
||||||
if (space instanceof OverlayAddressSpace) {
|
if (space instanceof OverlayAddressSpace) {
|
||||||
OverlayAddressSpace os = (OverlayAddressSpace) space;
|
OverlayAddressSpace os = (OverlayAddressSpace) space;
|
||||||
|
@ -162,11 +163,10 @@ class OverlaySpaceAdapterDB {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (map.size() != 0) {
|
if (map.size() != 0) {
|
||||||
long[] keys = map.getKeys();
|
for (OverlayAddressSpace space : map.values()) {
|
||||||
for (int i = 0; i < keys.length; i++) {
|
|
||||||
OverlayAddressSpace space = map.remove(keys[i]);
|
|
||||||
factory.removeOverlaySpace(space.getName());
|
factory.removeOverlaySpace(space.getName());
|
||||||
}
|
}
|
||||||
|
map.clear();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -39,7 +39,6 @@ import ghidra.program.model.lang.CompilerSpec;
|
||||||
import ghidra.util.*;
|
import ghidra.util.*;
|
||||||
import ghidra.util.classfinder.ClassTranslator;
|
import ghidra.util.classfinder.ClassTranslator;
|
||||||
import ghidra.util.datastruct.FixedSizeHashMap;
|
import ghidra.util.datastruct.FixedSizeHashMap;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.*;
|
import ghidra.util.exception.*;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
|
@ -3490,15 +3489,15 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
|
||||||
}
|
}
|
||||||
|
|
||||||
class IdsToDataTypeMap {
|
class IdsToDataTypeMap {
|
||||||
private Map<UniversalID, LongObjectHashtable<DataType>> map = new HashMap<>();
|
private Map<UniversalID, Map<Long, DataType>> map = new HashMap<>();
|
||||||
|
|
||||||
DataType getDataType(UniversalID sourceID, UniversalID dataTypeID) {
|
DataType getDataType(UniversalID sourceID, UniversalID dataTypeID) {
|
||||||
if (sourceID == null || sourceID.equals(universalID)) {
|
if (sourceID == null || sourceID.equals(universalID)) {
|
||||||
sourceID = LOCAL_ARCHIVE_UNIVERSAL_ID;
|
sourceID = LOCAL_ARCHIVE_UNIVERSAL_ID;
|
||||||
}
|
}
|
||||||
LongObjectHashtable<DataType> idMap = map.get(sourceID);
|
Map<Long, DataType> idMap = map.get(sourceID);
|
||||||
if (idMap == null) {
|
if (idMap == null) {
|
||||||
idMap = new LongObjectHashtable<>();
|
idMap = new HashMap<>();
|
||||||
map.put(sourceID, idMap);
|
map.put(sourceID, idMap);
|
||||||
}
|
}
|
||||||
DataType dt = idMap.get(dataTypeID.getValue());
|
DataType dt = idMap.get(dataTypeID.getValue());
|
||||||
|
@ -3527,7 +3526,7 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
|
||||||
else {
|
else {
|
||||||
sourceID = sourceArchive.getSourceArchiveID();
|
sourceID = sourceArchive.getSourceArchiveID();
|
||||||
}
|
}
|
||||||
LongObjectHashtable<DataType> idMap = map.get(sourceID);
|
Map<Long, DataType> idMap = map.get(sourceID);
|
||||||
if (idMap != null) {
|
if (idMap != null) {
|
||||||
idMap.remove(dataTypeID.getValue());
|
idMap.remove(dataTypeID.getValue());
|
||||||
}
|
}
|
||||||
|
|
|
@ -34,7 +34,6 @@ import ghidra.program.model.symbol.*;
|
||||||
import ghidra.program.util.LanguageTranslator;
|
import ghidra.program.util.LanguageTranslator;
|
||||||
import ghidra.util.Lock;
|
import ghidra.util.Lock;
|
||||||
import ghidra.util.Msg;
|
import ghidra.util.Msg;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.*;
|
import ghidra.util.exception.*;
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
|
@ -122,7 +121,7 @@ public class ExternalManagerDB implements ManagerDB, ExternalManager {
|
||||||
monitor.initialize(oldNameAdapter.getRecordCount());
|
monitor.initialize(oldNameAdapter.getRecordCount());
|
||||||
int cnt = 0;
|
int cnt = 0;
|
||||||
|
|
||||||
LongObjectHashtable<String> nameMap = new LongObjectHashtable<>();
|
Map<Long, String> nameMap = new HashMap<>();
|
||||||
|
|
||||||
RecordIterator iter = oldNameAdapter.getRecords();
|
RecordIterator iter = oldNameAdapter.getRecords();
|
||||||
while (iter.hasNext()) {
|
while (iter.hasNext()) {
|
||||||
|
|
|
@ -18,7 +18,8 @@ package ghidra.program.database.map;
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
|
|
||||||
import ghidra.program.model.address.*;
|
import ghidra.program.model.address.*;
|
||||||
import ghidra.util.datastruct.*;
|
import ghidra.util.datastruct.Range;
|
||||||
|
import ghidra.util.datastruct.SortedRangeList;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AddressSetView implementation that handles image base changes. NOTE: THIS IMPLEMENTATION
|
* AddressSetView implementation that handles image base changes. NOTE: THIS IMPLEMENTATION
|
||||||
|
@ -33,8 +34,7 @@ public class NormalizedAddressSet implements AddressSetView {
|
||||||
|
|
||||||
private AddressMap addrMap;
|
private AddressMap addrMap;
|
||||||
|
|
||||||
private LongObjectHashtable<SortedRangeList> baseLists =
|
private Map<Long, SortedRangeList> baseLists = new HashMap<>();
|
||||||
new LongObjectHashtable<SortedRangeList>();
|
|
||||||
private ArrayList<Long> bases = new ArrayList<Long>();
|
private ArrayList<Long> bases = new ArrayList<Long>();
|
||||||
|
|
||||||
private Comparator<Long> baseComparator = new Comparator<Long>() {
|
private Comparator<Long> baseComparator = new Comparator<Long>() {
|
||||||
|
@ -108,7 +108,7 @@ public class NormalizedAddressSet implements AddressSetView {
|
||||||
* Removes all addresses from this set.
|
* Removes all addresses from this set.
|
||||||
*/
|
*/
|
||||||
public void clear() {
|
public void clear() {
|
||||||
baseLists = new LongObjectHashtable<SortedRangeList>();
|
baseLists = new HashMap<>();
|
||||||
bases = new ArrayList<Long>();
|
bases = new ArrayList<Long>();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -251,9 +251,9 @@ public class NormalizedAddressSet implements AddressSetView {
|
||||||
@Override
|
@Override
|
||||||
public int getNumAddressRanges() {
|
public int getNumAddressRanges() {
|
||||||
int n = 0;
|
int n = 0;
|
||||||
long[] keys = baseLists.getKeys();
|
|
||||||
for (int i = 0; i < keys.length; i++) {
|
for (long key : baseLists.keySet()) {
|
||||||
SortedRangeList list = baseLists.get(keys[i]);
|
SortedRangeList list = baseLists.get(key);
|
||||||
n += list.getNumRanges();
|
n += list.getNumRanges();
|
||||||
}
|
}
|
||||||
return n;
|
return n;
|
||||||
|
@ -286,9 +286,8 @@ public class NormalizedAddressSet implements AddressSetView {
|
||||||
@Override
|
@Override
|
||||||
public long getNumAddresses() {
|
public long getNumAddresses() {
|
||||||
long n = 0;
|
long n = 0;
|
||||||
long[] keys = baseLists.getKeys();
|
for (long key : baseLists.keySet()) {
|
||||||
for (int i = 0; i < keys.length; i++) {
|
SortedRangeList list = baseLists.get(key);
|
||||||
SortedRangeList list = baseLists.get(keys[i]);
|
|
||||||
n += list.getNumValues();
|
n += list.getNumValues();
|
||||||
}
|
}
|
||||||
return n;
|
return n;
|
||||||
|
|
|
@ -32,9 +32,7 @@ import ghidra.program.model.listing.ProgramContext;
|
||||||
import ghidra.program.util.RangeMapAdapter;
|
import ghidra.program.util.RangeMapAdapter;
|
||||||
import ghidra.program.util.RegisterValueStore;
|
import ghidra.program.util.RegisterValueStore;
|
||||||
import ghidra.util.Lock;
|
import ghidra.util.Lock;
|
||||||
import ghidra.util.datastruct.LongObjectHashtable;
|
|
||||||
import ghidra.util.exception.CancelledException;
|
import ghidra.util.exception.CancelledException;
|
||||||
import ghidra.util.exception.VersionException;
|
|
||||||
import ghidra.util.task.TaskMonitor;
|
import ghidra.util.task.TaskMonitor;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -61,7 +59,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
|
||||||
* address ranges using the PropertyMap utilities.
|
* address ranges using the PropertyMap utilities.
|
||||||
*/
|
*/
|
||||||
private HashMap<String, Register> registersMap;
|
private HashMap<String, Register> registersMap;
|
||||||
private LongObjectHashtable<AddressRangeMapDB> valueMaps;
|
private Map<Integer, AddressRangeMapDB> valueMaps;
|
||||||
private Register baseContextRegister;
|
private Register baseContextRegister;
|
||||||
protected Map<Register, RegisterValueStore> defaultRegisterValueMap;
|
protected Map<Register, RegisterValueStore> defaultRegisterValueMap;
|
||||||
|
|
||||||
|
@ -88,7 +86,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
|
||||||
defaultRegisterValueMap = new HashMap<Register, RegisterValueStore>();
|
defaultRegisterValueMap = new HashMap<Register, RegisterValueStore>();
|
||||||
|
|
||||||
registersMap = new HashMap<String, Register>();
|
registersMap = new HashMap<String, Register>();
|
||||||
valueMaps = new LongObjectHashtable<AddressRangeMapDB>();
|
valueMaps = new HashMap<>();
|
||||||
registerSpaceSize = 0;
|
registerSpaceSize = 0;
|
||||||
|
|
||||||
for (Register register : registers) {
|
for (Register register : registers) {
|
||||||
|
@ -350,7 +348,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
|
||||||
public void invalidateCache(boolean all) throws IOException {
|
public void invalidateCache(boolean all) throws IOException {
|
||||||
lock.acquire();
|
lock.acquire();
|
||||||
try {
|
try {
|
||||||
valueMaps.removeAll();
|
valueMaps.clear();
|
||||||
}
|
}
|
||||||
finally {
|
finally {
|
||||||
lock.release();
|
lock.release();
|
||||||
|
|
|
@ -306,29 +306,11 @@ public abstract class DefaultPropertyMap implements PropertyMap {
|
||||||
* @throws ClassNotFoundException if the class for the object being
|
* @throws ClassNotFoundException if the class for the object being
|
||||||
* read is not in the class path
|
* read is not in the class path
|
||||||
*/
|
*/
|
||||||
public void restoreProperties(ObjectInputStream ois) throws IOException, ClassNotFoundException {
|
public void restoreProperties(ObjectInputStream ois)
|
||||||
|
throws IOException, ClassNotFoundException {
|
||||||
propertyMgr.restoreProperties(ois);
|
propertyMgr.restoreProperties(ois);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Write all properties in the map to the given output stream.
|
|
||||||
* @throws IOException if there is a problem writing to the stream
|
|
||||||
*/
|
|
||||||
public void saveAll(ObjectOutputStream out) throws IOException {
|
|
||||||
propertyMgr.saveAll(out);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Restore properties read from the given input stream.
|
|
||||||
* @param in input stream
|
|
||||||
* @throws IOException if there is a problem reading from the stream
|
|
||||||
* @throws ClassNotFoundException if the class for the object being
|
|
||||||
* read is not in the class path
|
|
||||||
*/
|
|
||||||
public void restoreAll(ObjectInputStream in) throws IOException, ClassNotFoundException {
|
|
||||||
propertyMgr.restoreAll(in);
|
|
||||||
}
|
|
||||||
|
|
||||||
private class AddressPropertyIterator implements AddressIterator {
|
private class AddressPropertyIterator implements AddressIterator {
|
||||||
|
|
||||||
private LongIterator iter;
|
private LongIterator iter;
|
||||||
|
@ -350,7 +332,8 @@ public abstract class DefaultPropertyMap implements PropertyMap {
|
||||||
|
|
||||||
AddressPropertyIterator(Address start, Address end, boolean forward) {
|
AddressPropertyIterator(Address start, Address end, boolean forward) {
|
||||||
iter =
|
iter =
|
||||||
propertyMgr.getPropertyIterator(addrMap.getKey(start), addrMap.getKey(end), forward);
|
propertyMgr.getPropertyIterator(addrMap.getKey(start), addrMap.getKey(end),
|
||||||
|
forward);
|
||||||
this.forward = forward;
|
this.forward = forward;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue